1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_sincos (tree);
107 static rtx expand_builtin_cexpi (tree, rtx, rtx);
108 static rtx expand_builtin_int_roundingfn (tree, rtx);
109 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
129 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
130 enum machine_mode, int);
131 static rtx expand_builtin_bcopy (tree, int);
132 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
133 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
134 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
136 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
137 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
139 static rtx expand_builtin_bzero (tree);
140 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
143 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
144 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
145 static rtx expand_builtin_alloca (tree, rtx);
146 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static rtx expand_builtin_fputs (tree, rtx, bool);
149 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
150 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
151 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
152 static tree stabilize_va_list_loc (location_t, tree, int);
153 static rtx expand_builtin_expect (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_expect (location_t, tree, tree);
156 static tree fold_builtin_classify_type (tree);
157 static tree fold_builtin_strlen (location_t, tree);
158 static tree fold_builtin_inf (location_t, tree, int);
159 static tree fold_builtin_nan (tree, tree, int);
160 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
161 static bool validate_arg (const_tree, enum tree_code code);
162 static bool integer_valued_real_p (tree);
163 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
164 static bool readonly_data_expr (tree);
165 static rtx expand_builtin_fabs (tree, rtx, rtx);
166 static rtx expand_builtin_signbit (tree, rtx);
167 static tree fold_builtin_sqrt (location_t, tree, tree);
168 static tree fold_builtin_cbrt (location_t, tree, tree);
169 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_cos (location_t, tree, tree, tree);
172 static tree fold_builtin_cosh (location_t, tree, tree, tree);
173 static tree fold_builtin_tan (tree, tree);
174 static tree fold_builtin_trunc (location_t, tree, tree);
175 static tree fold_builtin_floor (location_t, tree, tree);
176 static tree fold_builtin_ceil (location_t, tree, tree);
177 static tree fold_builtin_round (location_t, tree, tree);
178 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
179 static tree fold_builtin_bitop (tree, tree);
180 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
181 static tree fold_builtin_strchr (location_t, tree, tree, tree);
182 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
183 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
184 static tree fold_builtin_strcmp (location_t, tree, tree);
185 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
186 static tree fold_builtin_signbit (location_t, tree, tree);
187 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
188 static tree fold_builtin_isascii (location_t, tree);
189 static tree fold_builtin_toascii (location_t, tree);
190 static tree fold_builtin_isdigit (location_t, tree);
191 static tree fold_builtin_fabs (location_t, tree, tree);
192 static tree fold_builtin_abs (location_t, tree, tree);
193 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
195 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
196 static tree fold_builtin_0 (location_t, tree, bool);
197 static tree fold_builtin_1 (location_t, tree, tree, bool);
198 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
199 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
200 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
201 static tree fold_builtin_varargs (location_t, tree, tree, bool);
203 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
204 static tree fold_builtin_strstr (location_t, tree, tree, tree);
205 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
206 static tree fold_builtin_strcat (location_t, tree, tree);
207 static tree fold_builtin_strncat (location_t, tree, tree, tree);
208 static tree fold_builtin_strspn (location_t, tree, tree);
209 static tree fold_builtin_strcspn (location_t, tree, tree);
210 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
212 static rtx expand_builtin_object_size (tree);
213 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
214 enum built_in_function);
215 static void maybe_emit_chk_warning (tree, enum built_in_function);
216 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
217 static void maybe_emit_free_warning (tree);
218 static tree fold_builtin_object_size (tree, tree);
219 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
220 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
221 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
222 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
223 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
224 enum built_in_function);
225 static bool init_target_chars (void);
227 static unsigned HOST_WIDE_INT target_newline;
228 static unsigned HOST_WIDE_INT target_percent;
229 static unsigned HOST_WIDE_INT target_c;
230 static unsigned HOST_WIDE_INT target_s;
231 static char target_percent_c[3];
232 static char target_percent_s[3];
233 static char target_percent_s_newline[4];
234 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
235 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
236 static tree do_mpfr_arg2 (tree, tree, tree,
237 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
238 static tree do_mpfr_arg3 (tree, tree, tree, tree,
239 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
240 static tree do_mpfr_sincos (tree, tree, tree);
241 static tree do_mpfr_bessel_n (tree, tree, tree,
242 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
243 const REAL_VALUE_TYPE *, bool);
244 static tree do_mpfr_remquo (tree, tree, tree);
245 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME carries one of the reserved builtin prefixes,
   i.e. "__builtin_" or "__sync_".  (The listing had lost this
   function's return type, braces and return statements; restored.)  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
257 /* Return true if NODE should be considered for inline expansion regardless
258 of the optimization level. This means whenever a function is invoked with
259 its "internal" name, which normally contains the prefix "__builtin". */
262 called_as_built_in (tree node)
264 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
265 we want the name used to call the function, not the name it
267 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
268 return is_builtin_name (name);
271 /* Return the alignment in bits of EXP, an object.
272 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
273 guessed alignment e.g. from type alignment. */
276 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
281 if (handled_component_p (exp))
283 HOST_WIDE_INT bitsize, bitpos;
285 enum machine_mode mode;
286 int unsignedp, volatilep;
288 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
289 &mode, &unsignedp, &volatilep, true);
291 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
296 if (TREE_CODE (offset) == PLUS_EXPR)
298 next_offset = TREE_OPERAND (offset, 0);
299 offset = TREE_OPERAND (offset, 1);
303 if (host_integerp (offset, 1))
305 /* Any overflow in calculating offset_bits won't change
308 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
311 inner = MIN (inner, (offset_bits & -offset_bits));
313 else if (TREE_CODE (offset) == MULT_EXPR
314 && host_integerp (TREE_OPERAND (offset, 1), 1))
316 /* Any overflow in calculating offset_factor won't change
318 unsigned offset_factor
319 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
323 inner = MIN (inner, (offset_factor & -offset_factor));
327 inner = MIN (inner, BITS_PER_UNIT);
330 offset = next_offset;
334 align = MIN (inner, DECL_ALIGN (exp));
335 #ifdef CONSTANT_ALIGNMENT
336 else if (CONSTANT_CLASS_P (exp))
337 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
339 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
340 || TREE_CODE (exp) == INDIRECT_REF)
341 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
343 align = MIN (align, inner);
344 return MIN (align, max_align);
347 /* Returns true iff we can trust that alignment information has been
348 calculated properly. */
351 can_trust_pointer_alignment (void)
353 /* We rely on TER to compute accurate alignment information. */
354 return (optimize && flag_tree_ter);
357 /* Return the alignment in bits of EXP, a pointer valued expression.
358 But don't return more than MAX_ALIGN no matter what.
359 The alignment returned is, by default, the alignment of the thing that
360 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
362 Otherwise, look at the expression to see if we can do better, i.e., if the
363 expression is actually pointing at an object whose alignment is tighter. */
366 get_pointer_alignment (tree exp, unsigned int max_align)
368 unsigned int align, inner;
370 if (!can_trust_pointer_alignment ())
373 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
376 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
377 align = MIN (align, max_align);
381 switch (TREE_CODE (exp))
384 exp = TREE_OPERAND (exp, 0);
385 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
388 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
389 align = MIN (inner, max_align);
392 case POINTER_PLUS_EXPR:
393 /* If sum of pointer + int, restrict our maximum alignment to that
394 imposed by the integer. If not, we can't do any better than
396 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
399 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
400 & (max_align / BITS_PER_UNIT - 1))
404 exp = TREE_OPERAND (exp, 0);
408 /* See what we are pointing at and look at its alignment. */
409 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
417 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
418 way, because it could contain a zero byte in the middle.
419 TREE_STRING_LENGTH is the size of the character array, not the string.
421 ONLY_VALUE should be nonzero if the result is not going to be emitted
422 into the instruction stream and zero if it is going to be expanded.
423 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
424 is returned, otherwise NULL, since
425 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
426 evaluate the side-effects.
428 The value returned is of type `ssizetype'.
430 Unfortunately, string_constant can't access the values of const char
431 arrays with initializers, so neither can we do so here. */
434 c_strlen (tree src, int only_value)
437 HOST_WIDE_INT offset;
442 if (TREE_CODE (src) == COND_EXPR
443 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
447 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
448 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
449 if (tree_int_cst_equal (len1, len2))
453 if (TREE_CODE (src) == COMPOUND_EXPR
454 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
455 return c_strlen (TREE_OPERAND (src, 1), only_value);
457 src = string_constant (src, &offset_node);
461 max = TREE_STRING_LENGTH (src) - 1;
462 ptr = TREE_STRING_POINTER (src);
464 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
466 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
467 compute the offset to the following null if we don't know where to
468 start searching for it. */
471 for (i = 0; i < max; i++)
475 /* We don't know the starting offset, but we do know that the string
476 has no internal zero bytes. We can assume that the offset falls
477 within the bounds of the string; otherwise, the programmer deserves
478 what he gets. Subtract the offset from the length of the string,
479 and return that. This would perhaps not be valid if we were dealing
480 with named arrays in addition to literal string constants. */
482 return size_diffop_loc (input_location, size_int (max), offset_node);
485 /* We have a known offset into the string. Start searching there for
486 a null character if we can represent it as a single HOST_WIDE_INT. */
487 if (offset_node == 0)
489 else if (! host_integerp (offset_node, 0))
492 offset = tree_low_cst (offset_node, 0);
494 /* If the offset is known to be out of bounds, warn, and call strlen at
496 if (offset < 0 || offset > max)
498 /* Suppress multiple warnings for propagated constant strings. */
499 if (! TREE_NO_WARNING (src))
501 warning (0, "offset outside bounds of constant string");
502 TREE_NO_WARNING (src) = 1;
507 /* Use strlen to search for the first zero byte. Since any strings
508 constructed with build_string will have nulls appended, we win even
509 if we get handed something like (char[4])"abcd".
511 Since OFFSET is our starting index into the string, no further
512 calculation is needed. */
513 return ssize_int (strlen (ptr + offset));
516 /* Return a char pointer for a C string if it is a string constant
517 or sum of string constant and integer constant. */
524 src = string_constant (src, &offset_node);
528 if (offset_node == 0)
529 return TREE_STRING_POINTER (src);
530 else if (!host_integerp (offset_node, 1)
531 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
534 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
537 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
538 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
541 c_readstr (const char *str, enum machine_mode mode)
547 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
552 for (i = 0; i < GET_MODE_SIZE (mode); i++)
555 if (WORDS_BIG_ENDIAN)
556 j = GET_MODE_SIZE (mode) - i - 1;
557 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
558 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
559 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
561 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
564 ch = (unsigned char) str[i];
565 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
567 return immed_double_const (c[0], c[1], mode);
570 /* Cast a target constant CST to target CHAR and if that value fits into
571 host char type, return zero and put that value into variable pointed to by
575 target_char_cast (tree cst, char *p)
577 unsigned HOST_WIDE_INT val, hostval;
579 if (!host_integerp (cst, 1)
580 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
583 val = tree_low_cst (cst, 1);
584 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
585 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
588 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
589 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
598 /* Similar to save_expr, but assumes that arbitrary code is not executed
599 in between the multiple evaluations. In particular, we assume that a
600 non-addressable local variable will not be modified. */
603 builtin_save_expr (tree exp)
605 if (TREE_ADDRESSABLE (exp) == 0
606 && (TREE_CODE (exp) == PARM_DECL
607 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
610 return save_expr (exp);
/* NOTE(review): the leading numbers on each line are source line numbers
   fused into this listing, and they jump (622, 627, 636, ...): braces,
   #else/#endif arms and some statements have been elided.  This span is
   not compilable as-is; comments below document intent only.  Restore
   from a pristine copy before building.  */
613 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
614 times to get the address of either a higher stack frame, or a return
615 address located within it (depending on FNDECL_CODE). */
618 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
622 #ifdef INITIAL_FRAME_ADDRESS_RTX
623 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
/* NOTE(review): the #else arm choosing TEM when INITIAL_FRAME_ADDRESS_RTX
   is not defined has been elided from this listing.  */
627 /* For a zero count with __builtin_return_address, we don't care what
628 frame address we return, because target-specific definitions will
629 override us. Therefore frame pointer elimination is OK, and using
630 the soft frame pointer is OK.
632 For a nonzero count, or a zero count with __builtin_frame_address,
633 we require a stable offset from the current frame pointer to the
634 previous one, so we must use the hard frame pointer, and
635 we must disable frame pointer elimination. */
636 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
637 tem = frame_pointer_rtx;
/* NOTE(review): the else-branch header for the hard-frame-pointer case is
   missing above the next line.  */
640 tem = hard_frame_pointer_rtx;
642 /* Tell reload not to eliminate the frame pointer. */
643 crtl->accesses_prior_frames = 1;
647 /* Some machines need special handling before we can access
648 arbitrary frames. For example, on the SPARC, we must first flush
649 all register windows to the stack. */
650 #ifdef SETUP_FRAME_ADDRESSES
652 SETUP_FRAME_ADDRESSES ();
655 /* On the SPARC, the return address is not in the frame, it is in a
656 register. There is no way to access it off of the current frame
657 pointer, but it can be accessed off the previous frame pointer by
658 reading the value from the register window save area. */
659 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
660 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
/* NOTE(review): presumably count is adjusted here when the return address
   lives in the previous frame -- confirm against a full copy.  */
664 /* Scan back COUNT frames to the specified frame. */
665 for (i = 0; i < count; i++)
667 /* Assume the dynamic chain pointer is in the word that the
668 frame address points to, unless otherwise specified. */
669 #ifdef DYNAMIC_CHAIN_ADDRESS
670 tem = DYNAMIC_CHAIN_ADDRESS (tem);
672 tem = memory_address (Pmode, tem);
673 tem = gen_frame_mem (Pmode, tem);
674 tem = copy_to_reg (tem);
677 /* For __builtin_frame_address, return what we've got. But, on
678 the SPARC for example, we may have to add a bias. */
679 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
680 #ifdef FRAME_ADDR_RTX
681 return FRAME_ADDR_RTX (tem);
686 /* For __builtin_return_address, get the return address from that frame. */
687 #ifdef RETURN_ADDR_RTX
688 tem = RETURN_ADDR_RTX (count, tem);
690 tem = memory_address (Pmode,
691 plus_constant (tem, GET_MODE_SIZE (Pmode)));
692 tem = gen_frame_mem (Pmode, tem);
697 /* Alias set used for setjmp buffer. */
698 static alias_set_type setjmp_alias_set = -1;
/* NOTE(review): elided listing (left-column line numbers jump): local
   declarations for MEM and STACK_SAVE, plus braces/#endif's, are missing.
   Layout of the setjmp buffer as emitted here: word 0 = frame pointer,
   word 1 = receiver label, words 2.. = machine-dependent stack save area.
   Not compilable as-is.  */
700 /* Construct the leading half of a __builtin_setjmp call. Control will
701 return to RECEIVER_LABEL. This is also called directly by the SJLJ
702 exception handling code. */
705 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
707 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
711 if (setjmp_alias_set == -1)
712 setjmp_alias_set = new_alias_set ();
714 buf_addr = convert_memory_address (Pmode, buf_addr);
716 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
718 /* We store the frame pointer and the address of receiver_label in
719 the buffer and use the rest of it for the stack save area, which
720 is machine-dependent. */
722 mem = gen_rtx_MEM (Pmode, buf_addr);
723 set_mem_alias_set (mem, setjmp_alias_set);
724 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
/* NOTE(review): the next line ends with a comma, fusing it with the
   following set_mem_alias_set call into one comma-expression statement --
   looks intentional but worth confirming against a full copy.  */
726 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
727 set_mem_alias_set (mem, setjmp_alias_set);
729 emit_move_insn (validize_mem (mem),
730 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
732 stack_save = gen_rtx_MEM (sa_mode,
733 plus_constant (buf_addr,
734 2 * GET_MODE_SIZE (Pmode)));
735 set_mem_alias_set (stack_save, setjmp_alias_set);
736 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
738 /* If there is further processing to do, do it. */
739 #ifdef HAVE_builtin_setjmp_setup
740 if (HAVE_builtin_setjmp_setup)
741 emit_insn (gen_builtin_setjmp_setup (buf_addr));
744 /* Tell optimize_save_area_alloca that extra work is going to
745 need to go on during alloca. */
746 cfun->calls_setjmp = 1;
748 /* We have a nonlocal label. */
749 cfun->has_nonlocal_label = 1;
/* NOTE(review): elided listing -- the nested #ifdef/#endif skeleton,
   braces and some declarations (CHAIN, the loop index I) are missing.
   Intent: this is the landing pad for __builtin_setjmp; it marks FP as
   used, clobbers the static chain, restores frame/arg pointers, then
   emits a scheduling barrier.  Not compilable as-is.  */
752 /* Construct the trailing part of a __builtin_setjmp call. This is
753 also called directly by the SJLJ exception handling code. */
756 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
760 /* Clobber the FP when we get here, so we have to make sure it's
761 marked as used by this function. */
762 emit_use (hard_frame_pointer_rtx)
764 /* Mark the static chain as clobbered here so life information
765 doesn't get messed up for it. */
766 chain = targetm.calls.static_chain (current_function_decl, true);
767 if (chain && REG_P (chain))
768 emit_clobber (chain);
770 /* Now put in the code to restore the frame pointer, and argument
771 pointer, if needed. */
772 #ifdef HAVE_nonlocal_goto
773 if (! HAVE_nonlocal_goto)
776 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
777 /* This might change the hard frame pointer in ways that aren't
778 apparent to early optimization passes, so force a clobber. */
779 emit_clobber (hard_frame_pointer_rtx);
782 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
783 if (fixed_regs[ARG_POINTER_REGNUM])
785 #ifdef ELIMINABLE_REGS
787 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
/* NOTE(review): the loop below checks whether the arg pointer can be
   eliminated to the hard frame pointer; only when it cannot is it
   reloaded from the save area.  */
789 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
790 if (elim_regs[i].from == ARG_POINTER_REGNUM
791 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
794 if (i == ARRAY_SIZE (elim_regs))
797 /* Now restore our arg pointer from the address at which it
798 was saved in our stack frame. */
799 emit_move_insn (crtl->args.internal_arg_pointer,
800 copy_to_reg (get_arg_pointer_save_area ()));
805 #ifdef HAVE_builtin_setjmp_receiver
806 if (HAVE_builtin_setjmp_receiver)
807 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
810 #ifdef HAVE_nonlocal_goto_receiver
811 if (HAVE_nonlocal_goto_receiver)
812 emit_insn (gen_nonlocal_goto_receiver ());
817 /* We must not allow the code we just generated to be reordered by
818 scheduling. Specifically, the update of the frame pointer must
819 happen immediately, not later. */
820 emit_insn (gen_blockage ());
/* NOTE(review): elided listing -- braces, #else/#endif arms, and the body
   of the insn-marking loop are missing (line numbers jump).  Intent: read
   FP/label/SP back out of the setjmp buffer laid out by
   expand_builtin_setjmp_setup and jump, then tag the jump insn with a
   REG_NON_LOCAL_GOTO note.  Not compilable as-is.  */
823 /* __builtin_longjmp is passed a pointer to an array of five words (not
824 all will be used on all machines). It operates similarly to the C
825 library function of the same name, but is more efficient. Much of
826 the code below is copied from the handling of non-local gotos. */
829 expand_builtin_longjmp (rtx buf_addr, rtx value)
831 rtx fp, lab, stack, insn, last;
832 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
834 /* DRAP is needed for stack realign if longjmp is expanded to current
836 if (SUPPORTS_STACK_ALIGNMENT)
837 crtl->need_drap = true;
839 if (setjmp_alias_set == -1)
840 setjmp_alias_set = new_alias_set ();
842 buf_addr = convert_memory_address (Pmode, buf_addr);
844 buf_addr = force_reg (Pmode, buf_addr);
846 /* We require that the user must pass a second argument of 1, because
847 that is what builtin_setjmp will return. */
848 gcc_assert (value == const1_rtx);
850 last = get_last_insn ();
851 #ifdef HAVE_builtin_longjmp
852 if (HAVE_builtin_longjmp)
853 emit_insn (gen_builtin_longjmp (buf_addr));
/* NOTE(review): the else-arm (generic expansion below) header is elided.  */
857 fp = gen_rtx_MEM (Pmode, buf_addr);
858 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
859 GET_MODE_SIZE (Pmode)));
861 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
862 2 * GET_MODE_SIZE (Pmode)));
863 set_mem_alias_set (fp, setjmp_alias_set);
864 set_mem_alias_set (lab, setjmp_alias_set);
865 set_mem_alias_set (stack, setjmp_alias_set);
867 /* Pick up FP, label, and SP from the block and jump. This code is
868 from expand_goto in stmt.c; see there for detailed comments. */
869 #ifdef HAVE_nonlocal_goto
870 if (HAVE_nonlocal_goto)
871 /* We have to pass a value to the nonlocal_goto pattern that will
872 get copied into the static_chain pointer, but it does not matter
873 what that value is, because builtin_setjmp does not use it. */
874 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
878 lab = copy_to_reg (lab);
880 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
881 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
883 emit_move_insn (hard_frame_pointer_rtx, fp);
884 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
886 emit_use (hard_frame_pointer_rtx);
887 emit_use (stack_pointer_rtx);
888 emit_indirect_jump (lab);
892 /* Search backwards and mark the jump insn as a non-local goto.
893 Note that this precludes the use of __builtin_longjmp to a
894 __builtin_setjmp target in the same function. However, we've
895 already cautioned the user that these functions are for
896 internal exception handling use only. */
897 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
899 gcc_assert (insn != last);
903 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
/* NOTE(review): the break after tagging the JUMP_P insn, and the abort
   path for calls, are elided here.  */
906 else if (CALL_P (insn))
/* NOTE(review): elided listing -- braces, early-return on bad arglist,
   #else/#endif arms and parts of the trailing insn-marking loop are
   missing (line numbers jump).  Intent: expand __builtin_nonlocal_goto
   by restoring FP/SP from the save area and jumping to the target
   label.  Not compilable as-is.  */
911 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
912 and the address of the save area. */
915 expand_builtin_nonlocal_goto (tree exp)
917 tree t_label, t_save_area;
918 rtx r_label, r_save_area, r_fp, r_sp, insn;
920 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
923 t_label = CALL_EXPR_ARG (exp, 0);
924 t_save_area = CALL_EXPR_ARG (exp, 1);
926 r_label = expand_normal (t_label);
927 r_label = convert_memory_address (Pmode, r_label);
928 r_save_area = expand_normal (t_save_area);
929 r_save_area = convert_memory_address (Pmode, r_save_area);
930 /* Copy the address of the save location to a register just in case it was based
931 on the frame pointer. */
932 r_save_area = copy_to_reg (r_save_area);
933 r_fp = gen_rtx_MEM (Pmode, r_save_area);
934 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
935 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
937 crtl->has_nonlocal_goto = 1;
939 #ifdef HAVE_nonlocal_goto
940 /* ??? We no longer need to pass the static chain value, afaik. */
941 if (HAVE_nonlocal_goto)
942 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
/* NOTE(review): generic (non-pattern) expansion follows; its else-header
   is elided.  */
946 r_label = copy_to_reg (r_label);
948 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
949 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
951 /* Restore frame pointer for containing function.
952 This sets the actual hard register used for the frame pointer
953 to the location of the function's incoming static chain info.
954 The non-local goto handler will then adjust it to contain the
955 proper value and reload the argument pointer, if needed. */
956 emit_move_insn (hard_frame_pointer_rtx, r_fp);
957 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
959 /* USE of hard_frame_pointer_rtx added for consistency;
960 not clear if really needed. */
961 emit_use (hard_frame_pointer_rtx);
962 emit_use (stack_pointer_rtx);
964 /* If the architecture is using a GP register, we must
965 conservatively assume that the target function makes use of it.
966 The prologue of functions with nonlocal gotos must therefore
967 initialize the GP register to the appropriate value, and we
968 must then make sure that this value is live at the point
969 of the jump. (Note that this doesn't necessarily apply
970 to targets with a nonlocal_goto pattern; they are free
971 to implement it in their own way. Note also that this is
972 a no-op if the GP register is a global invariant.) */
973 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
974 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
975 emit_use (pic_offset_table_rtx);
977 emit_indirect_jump (r_label);
980 /* Search backwards to the jump insn and mark it as a
982 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
986 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
989 else if (CALL_P (insn))
996 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
997 (not all will be used on all machines) that was passed to __builtin_setjmp.
998 It updates the stack pointer in that block to correspond to the current
/* NOTE(review): the function's return-type line and closing lines are not
   visible in this excerpt -- confirm against the full source.  */
1002 expand_builtin_update_setjmp_buf (rtx buf_addr)
/* Default: the stack pointer is saved in a Pmode-sized slot.  */
1004 enum machine_mode sa_mode = Pmode;
/* If the target has a save_stack_nonlocal pattern, use the mode of its
   first operand instead of Pmode.  */
1008 #ifdef HAVE_save_stack_nonlocal
1009 if (HAVE_save_stack_nonlocal)
1010 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
/* A target-defined STACK_SAVEAREA_MODE overrides the choice above.  */
1012 #ifdef STACK_SAVEAREA_MODE
1013 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The stack-pointer slot lives two pointer-sized words into the buffer
   (after the frame address and the resume label).  */
1017 = gen_rtx_MEM (sa_mode,
1020 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
/* Let the target emit any extra bookkeeping it needs for setjmp.  */
1024 emit_insn (gen_setjmp ());
/* Store the current stack pointer into the buffer slot.  */
1027 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1030 /* Expand a call to __builtin_prefetch. For a target that does not support
1031 data prefetch, evaluate the memory address argument in case it has side
/* Takes the CALL_EXPR EXP; emits a prefetch insn when the target has one,
   otherwise only evaluates the address for its side effects.  */
1035 expand_builtin_prefetch (tree exp)
1037 tree arg0, arg1, arg2;
/* First argument must be a pointer; bail out otherwise.  */
1041 if (!validate_arglist (exp, POINTER_TYPE, 0))
1044 arg0 = CALL_EXPR_ARG (exp, 0);
1046 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1047 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1049 nargs = call_expr_nargs (exp);
1051 arg1 = CALL_EXPR_ARG (exp, 1);
1053 arg1 = integer_zero_node;
1055 arg2 = CALL_EXPR_ARG (exp, 2);
/* Default locality: 3 = keep in all cache levels.  */
1057 arg2 = build_int_cst (NULL_TREE, 3);
1059 /* Argument 0 is an address. */
1060 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1062 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1063 if (TREE_CODE (arg1) != INTEGER_CST)
1065 error ("second argument to %<__builtin_prefetch%> must be a constant");
/* Recover from the error by substituting zero (read).  */
1066 arg1 = integer_zero_node;
1068 op1 = expand_normal (arg1);
1069 /* Argument 1 must be either zero or one. */
1070 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1072 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1077 /* Argument 2 (locality) must be a compile-time constant int. */
1078 if (TREE_CODE (arg2) != INTEGER_CST)
1080 error ("third argument to %<__builtin_prefetch%> must be a constant");
1081 arg2 = integer_zero_node;
1083 op2 = expand_normal (arg2);
1084 /* Argument 2 must be 0, 1, 2, or 3. */
1085 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1087 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1091 #ifdef HAVE_prefetch
/* Force the address into a register if the prefetch pattern's operand
   predicate rejects it, or if it is not already in Pmode.  */
1094 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1096 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1097 || (GET_MODE (op0) != Pmode))
1099 op0 = convert_memory_address (Pmode, op0);
1100 op0 = force_reg (Pmode, op0);
1102 emit_insn (gen_prefetch (op0, op1, op2));
1106 /* Don't do anything with direct references to volatile memory, but
1107 generate code to handle other side effects. */
1108 if (!MEM_P (op0) && side_effects_p (op0))
1112 /* Get a MEM rtx for expression EXP which is the address of an operand
1113 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1114 the maximum length of the block of memory that might be accessed or
/* Returns a BLKmode MEM with attributes derived from EXP where that is
   provably safe.  NOTE(review): several lines are elided in this excerpt;
   the comments below describe only the visible logic.  */
1118 get_memory_rtx (tree exp, tree len)
1120 tree orig_exp = exp;
1124 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1125 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1126 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1127 exp = TREE_OPERAND (exp, 0);
/* Expand the original (un-stripped) expression to get the address.  */
1129 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1130 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1132 /* Get an expression we can use to find the attributes to assign to MEM.
1133 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1134 we can. First remove any nops. */
1135 while (CONVERT_EXPR_P (exp)
1136 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1137 exp = TREE_OPERAND (exp, 0);
/* &obj + positive-constant: remember the offset and attribute from OBJ.  */
1140 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1141 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1142 && host_integerp (TREE_OPERAND (exp, 1), 0)
1143 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1144 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1145 else if (TREE_CODE (exp) == ADDR_EXPR)
1146 exp = TREE_OPERAND (exp, 0);
1147 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1148 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1152 /* Honor attributes derived from exp, except for the alias set
1153 (as builtin stringops may alias with anything) and the size
1154 (as stringops may access multiple array elements). */
1157 set_mem_attributes (mem, exp, 0);
/* Apply the constant offset captured from the POINTER_PLUS_EXPR case.  */
1160 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1162 /* Allow the string and memory builtins to overflow from one
1163 field into another, see http://gcc.gnu.org/PR23561.
1164 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1165 memory accessed by the string or memory builtin will fit
1166 within the field. */
1167 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1169 tree mem_expr = MEM_EXPR (mem);
1170 HOST_WIDE_INT offset = -1, length = -1;
/* Strip wrappers to reach the innermost COMPONENT_REF.  */
1173 while (TREE_CODE (inner) == ARRAY_REF
1174 || CONVERT_EXPR_P (inner)
1175 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1176 || TREE_CODE (inner) == SAVE_EXPR)
1177 inner = TREE_OPERAND (inner, 0);
1179 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1181 if (MEM_OFFSET (mem)
1182 && CONST_INT_P (MEM_OFFSET (mem)))
1183 offset = INTVAL (MEM_OFFSET (mem));
/* LENGTH stays -1 (unknown) unless LEN is a known host integer.  */
1185 if (offset >= 0 && len && host_integerp (len, 0))
1186 length = tree_low_cst (len, 0);
/* Walk outward through nested COMPONENT_REFs, dropping each one from
   MEM_EXPR unless the access provably fits inside that field.  */
1188 while (TREE_CODE (inner) == COMPONENT_REF)
1190 tree field = TREE_OPERAND (inner, 1);
1191 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1192 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1194 /* Bitfields are generally not byte-addressable. */
1195 gcc_assert (!DECL_BIT_FIELD (field)
1196 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1197 % BITS_PER_UNIT) == 0
1198 && host_integerp (DECL_SIZE (field), 0)
1199 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1200 % BITS_PER_UNIT) == 0));
1202 /* If we can prove that the memory starting at XEXP (mem, 0) and
1203 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1204 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1205 fields without DECL_SIZE_UNIT like flexible array members. */
1207 && DECL_SIZE_UNIT (field)
1208 && host_integerp (DECL_SIZE_UNIT (field), 0))
1211 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1214 && offset + length <= size)
/* Otherwise fold this field's offset into OFFSET and move outward.  */
1219 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1220 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1221 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1229 mem_expr = TREE_OPERAND (mem_expr, 0);
1230 inner = TREE_OPERAND (inner, 0);
1233 if (mem_expr == NULL)
1235 if (mem_expr != MEM_EXPR (mem))
1237 set_mem_expr (mem, mem_expr);
1238 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and may span multiple elements, so
   clear the alias set and the size attribute unconditionally.  */
1241 set_mem_alias_set (mem, 0);
1242 set_mem_size (mem, NULL_RTX);
1248 /* Built-in functions to perform an untyped call and return.  */
/* The three tables below are filled in lazily by apply_args_size and
   apply_result_size; their contents never change afterwards.  */
1250 /* For each register that may be used for calling a function, this
1251 gives a mode used to copy the register's value. VOIDmode indicates
1252 the register is not used for calling a function. If the machine
1253 has register windows, this gives only the outbound registers.
1254 INCOMING_REGNO gives the corresponding inbound register. */
1255 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1257 /* For each register that may be used for returning values, this gives
1258 a mode used to copy the register's value. VOIDmode indicates the
1259 register is not used for returning values. If the machine has
1260 register windows, this gives only the outbound registers.
1261 INCOMING_REGNO gives the corresponding inbound register. */
1262 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1264 /* For each register that may be used for calling a function, this
1265 gives the offset of that register into the block returned by
1266 __builtin_apply_args. 0 indicates that the register is not
1267 used for calling a function. */
1268 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1270 /* Return the size required for the block returned by __builtin_apply_args,
1271 and initialize apply_args_mode. */
1274 apply_args_size (void)
/* Cached result: -1 means "not yet computed".  */
1276 static int size = -1;
1279 enum machine_mode mode;
1281 /* The values computed by this function never change. */
1284 /* The first value is the incoming arg-pointer. */
1285 size = GET_MODE_SIZE (Pmode);
1287 /* The second value is the structure value address unless this is
1288 passed as an "invisible" first argument. */
1289 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1290 size += GET_MODE_SIZE (Pmode);
/* Account for every hard register that can carry an argument,
   recording each register's slot offset and copy mode.  */
1292 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1293 if (FUNCTION_ARG_REGNO_P (regno))
1295 mode = reg_raw_mode[regno];
1297 gcc_assert (mode != VOIDmode);
/* Round the running size up to the mode's alignment.  */
1299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1300 if (size % align != 0)
1301 size = CEIL (size, align) * align;
1302 apply_args_reg_offset[regno] = size;
1303 size += GET_MODE_SIZE (mode);
1304 apply_args_mode[regno] = mode;
/* Non-argument registers get VOIDmode / offset 0.  */
1308 apply_args_mode[regno] = VOIDmode;
1309 apply_args_reg_offset[regno] = 0;
1315 /* Return the size required for the block returned by __builtin_apply,
1316 and initialize apply_result_mode. */
1319 apply_result_size (void)
/* Cached result: -1 means "not yet computed".  */
1321 static int size = -1;
1323 enum machine_mode mode;
1325 /* The values computed by this function never change. */
/* Account for every hard register that can carry a return value.  */
1330 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1331 if (FUNCTION_VALUE_REGNO_P (regno))
1333 mode = reg_raw_mode[regno];
1335 gcc_assert (mode != VOIDmode);
/* Round the running size up to the mode's alignment.  */
1337 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1338 if (size % align != 0)
1339 size = CEIL (size, align) * align;
1340 size += GET_MODE_SIZE (mode);
1341 apply_result_mode[regno] = mode;
1344 apply_result_mode[regno] = VOIDmode;
1346 /* Allow targets that use untyped_call and untyped_return to override
1347 the size so that machine-specific information can be stored here. */
1348 #ifdef APPLY_RESULT_SIZE
1349 size = APPLY_RESULT_SIZE;
1355 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1356 /* Create a vector describing the result block RESULT. If SAVEP is true,
1357 the result block is used to save the values; otherwise it is used to
1358 restore the values. */
/* Returns a PARALLEL of SETs: register->memory when saving,
   memory->register when restoring.  */
1361 result_vector (int savep, rtx result)
1363 int regno, size, align, nelts;
1364 enum machine_mode mode;
/* Worst case: one SET per hard register.  */
1366 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1369 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1370 if ((mode = apply_result_mode[regno]) != VOIDmode)
1372 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1373 if (size % align != 0)
1374 size = CEIL (size, align) * align;
/* When restoring, map through INCOMING_REGNO for register windows.  */
1375 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1376 mem = adjust_address (result, mode, size);
1377 savevec[nelts++] = (savep
1378 ? gen_rtx_SET (VOIDmode, mem, reg)
1379 : gen_rtx_SET (VOIDmode, reg, mem));
1380 size += GET_MODE_SIZE (mode);
1382 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1384 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1386 /* Save the state required to perform an untyped call with the same
1387 arguments as were passed to the current function. */
/* Emits the register/pointer saves and returns the block's address.  */
1390 expand_builtin_apply_args_1 (void)
1393 int size, align, regno;
1394 enum machine_mode mode;
1395 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1397 /* Create a block where the arg-pointer, structure value address,
1398 and argument registers can be saved. */
1399 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1401 /* Walk past the arg-pointer and structure value address. */
1402 size = GET_MODE_SIZE (Pmode);
1403 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1404 size += GET_MODE_SIZE (Pmode);
1406 /* Save each register used in calling a function to the block. */
1407 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1408 if ((mode = apply_args_mode[regno]) != VOIDmode)
1410 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1411 if (size % align != 0)
1412 size = CEIL (size, align) * align;
/* Use the inbound register number -- this runs at function entry.  */
1414 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1416 emit_move_insn (adjust_address (registers, mode, size), tem);
1417 size += GET_MODE_SIZE (mode);
1420 /* Save the arg pointer to the block. */
1421 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1422 #ifdef STACK_GROWS_DOWNWARD
1423 /* We need the pointer as the caller actually passed them to us, not
1424 as we might have pretended they were passed. Make sure it's a valid
1425 operand, as emit_move_insn isn't expected to handle a PLUS. */
1427 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1430 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1432 size = GET_MODE_SIZE (Pmode);
1434 /* Save the structure value address unless this is passed as an
1435 "invisible" first argument. */
1436 if (struct_incoming_value)
1438 emit_move_insn (adjust_address (registers, Pmode, size),
1439 copy_to_reg (struct_incoming_value));
1440 size += GET_MODE_SIZE (Pmode);
1443 /* Return the address of the block. */
1444 return copy_addr_to_reg (XEXP (registers, 0));
1447 /* __builtin_apply_args returns block of memory allocated on
1448 the stack into which is stored the arg pointer, structure
1449 value address, static chain, and all the registers that might
1450 possibly be used in performing a function call. The code is
1451 moved to the start of the function so the incoming values are
1455 expand_builtin_apply_args (void)
1457 /* Don't do __builtin_apply_args more than once in a function.
1458 Save the result of the first call and reuse it. */
1459 if (apply_args_value != 0)
1460 return apply_args_value;
1462 /* When this function is called, it means that registers must be
1463 saved on entry to this function. So we migrate the
1464 call to the first insn of this function. */
/* Emit the saving code into a detached sequence first.  */
1469 temp = expand_builtin_apply_args_1 ();
/* Cache the result so later calls in this function reuse it.  */
1473 apply_args_value = temp;
1475 /* Put the insns after the NOTE that starts the function.
1476 If this is inside a start_sequence, make the outer-level insn
1477 chain current, so the code is placed at the start of the
1478 function. If internal_arg_pointer is a non-virtual pseudo,
1479 it needs to be placed after the function that initializes
1481 push_topmost_sequence ();
1482 if (REG_P (crtl->args.internal_arg_pointer)
1483 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1484 emit_insn_before (seq, parm_birth_insn)
1486 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1487 pop_topmost_sequence ();
1492 /* Perform an untyped call and save the state required to perform an
1493 untyped return of whatever value was returned by the given function. */
/* FUNCTION is the callee address, ARGUMENTS the block built by
   __builtin_apply_args, ARGSIZE the byte count of stack arguments.
   Returns the address of a block holding the callee's return registers.
   NOTE(review): lines are elided in this excerpt; comments describe
   only the visible logic.  */
1496 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1498 int size, align, regno;
1499 enum machine_mode mode;
1500 rtx incoming_args, result, reg, dest, src, call_insn;
1501 rtx old_stack_level = 0;
1502 rtx call_fusage = 0;
1503 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1505 arguments = convert_memory_address (Pmode, arguments);
1507 /* Create a block where the return registers can be saved. */
1508 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1510 /* Fetch the arg pointer from the ARGUMENTS block. */
1511 incoming_args = gen_reg_rtx (Pmode);
1512 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1513 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the saved pointer is past the arguments.  */
1514 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1515 incoming_args, 0, OPTAB_LIB_WIDEN);
1518 /* Push a new argument block and copy the arguments. Do not allow
1519 the (potential) memcpy call below to interfere with our stack
1521 do_pending_stack_adjust ();
1524 /* Save the stack with nonlocal if available. */
1525 #ifdef HAVE_save_stack_nonlocal
1526 if (HAVE_save_stack_nonlocal)
1527 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1530 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1532 /* Allocate a block of memory onto the stack and copy the memory
1533 arguments to the outgoing arguments address. */
1534 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1536 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1537 may have already set current_function_calls_alloca to true.
1538 current_function_calls_alloca won't be set if argsize is zero,
1539 so we have to guarantee need_drap is true here. */
1540 if (SUPPORTS_STACK_ALIGNMENT)
1541 crtl->need_drap = true;
1543 dest = virtual_outgoing_args_rtx;
1544 #ifndef STACK_GROWS_DOWNWARD
/* Adjust DEST downward by ARGSIZE; fold the constant case directly.  */
1545 if (CONST_INT_P (argsize))
1546 dest = plus_constant (dest, -INTVAL (argsize));
1548 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1550 dest = gen_rtx_MEM (BLKmode, dest);
1551 set_mem_align (dest, PARM_BOUNDARY);
1552 src = gen_rtx_MEM (BLKmode, incoming_args);
1553 set_mem_align (src, PARM_BOUNDARY);
/* Copy the caller's saved argument area into the new block.  */
1554 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1556 /* Refer to the argument block. */
1558 arguments = gen_rtx_MEM (BLKmode, arguments);
1559 set_mem_align (arguments, PARM_BOUNDARY);
1561 /* Walk past the arg-pointer and structure value address. */
1562 size = GET_MODE_SIZE (Pmode);
1564 size += GET_MODE_SIZE (Pmode);
1566 /* Restore each of the registers previously saved. Make USE insns
1567 for each of these registers for use in making the call. */
1568 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1569 if ((mode = apply_args_mode[regno]) != VOIDmode)
1571 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1572 if (size % align != 0)
1573 size = CEIL (size, align) * align;
1574 reg = gen_rtx_REG (mode, regno);
1575 emit_move_insn (reg, adjust_address (arguments, mode, size));
/* Record the register in CALL_FUSAGE so the call keeps it live.  */
1576 use_reg (&call_fusage, reg);
1577 size += GET_MODE_SIZE (mode);
1580 /* Restore the structure value address unless this is passed as an
1581 "invisible" first argument. */
1582 size = GET_MODE_SIZE (Pmode);
1585 rtx value = gen_reg_rtx (Pmode);
1586 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1587 emit_move_insn (struct_value, value);
1588 if (REG_P (struct_value))
1589 use_reg (&call_fusage, struct_value);
1590 size += GET_MODE_SIZE (Pmode);
1593 /* All arguments and registers used for the call are set up by now! */
1594 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1596 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1597 and we don't want to load it into a register as an optimization,
1598 because prepare_call_address already did it if it should be done. */
1599 if (GET_CODE (function) != SYMBOL_REF)
1600 function = memory_address (FUNCTION_MODE, function);
1602 /* Generate the actual call instruction and save the return value. */
1603 #ifdef HAVE_untyped_call
1604 if (HAVE_untyped_call)
1605 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1606 result, result_vector (1, result)));
1609 #ifdef HAVE_call_value
1610 if (HAVE_call_value)
1614 /* Locate the unique return register. It is not possible to
1615 express a call that sets more than one return register using
1616 call_value; use untyped_call for that. In fact, untyped_call
1617 only needs to save the return registers in the given block. */
1618 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1619 if ((mode = apply_result_mode[regno]) != VOIDmode)
1621 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1623 valreg = gen_rtx_REG (mode, regno);
1626 emit_call_insn (GEN_CALL_VALUE (valreg,
1627 gen_rtx_MEM (FUNCTION_MODE, function),
1628 const0_rtx, NULL_RTX, const0_rtx));
1630 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1636 /* Find the CALL insn we just emitted, and attach the register usage
1638 call_insn = last_call_insn ();
1639 add_function_usage_to (call_insn, call_fusage);
1641 /* Restore the stack. */
1642 #ifdef HAVE_save_stack_nonlocal
1643 if (HAVE_save_stack_nonlocal)
1644 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1647 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1651 /* Return the address of the result block. */
1652 result = copy_addr_to_reg (XEXP (result, 0));
1653 return convert_memory_address (ptr_mode, result);
1656 /* Perform an untyped return. */
/* RESULT is the address of a block produced by expand_builtin_apply;
   reloads the saved return registers and returns to the caller.  */
1659 expand_builtin_return (rtx result)
1661 int size, align, regno;
1662 enum machine_mode mode;
1664 rtx call_fusage = 0;
1666 result = convert_memory_address (Pmode, result);
/* Make sure apply_result_mode[] is initialized.  */
1668 apply_result_size ();
1669 result = gen_rtx_MEM (BLKmode, result);
1671 #ifdef HAVE_untyped_return
/* Targets with an untyped_return pattern do everything themselves.  */
1672 if (HAVE_untyped_return)
1674 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1680 /* Restore the return value and note that each value is used. */
1682 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1683 if ((mode = apply_result_mode[regno]) != VOIDmode)
1685 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1686 if (size % align != 0)
1687 size = CEIL (size, align) * align;
1688 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1689 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USE insns in a side sequence for emission below.  */
1691 push_to_sequence (call_fusage);
1693 call_fusage = get_insns ();
1695 size += GET_MODE_SIZE (mode);
1698 /* Put the USE insns before the return. */
1699 emit_insn (call_fusage);
1701 /* Return whatever values was restored by jumping directly to the end
1703 expand_naked_return ();
1706 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree type code onto the __builtin_classify_type enum value.  */
1708 static enum type_class
1709 type_to_class (tree type)
1711 switch (TREE_CODE (type))
1713 case VOID_TYPE: return void_type_class;
1714 case INTEGER_TYPE: return integer_type_class;
1715 case ENUMERAL_TYPE: return enumeral_type_class;
1716 case BOOLEAN_TYPE: return boolean_type_class;
1717 case POINTER_TYPE: return pointer_type_class;
1718 case REFERENCE_TYPE: return reference_type_class;
1719 case OFFSET_TYPE: return offset_type_class;
1720 case REAL_TYPE: return real_type_class;
1721 case COMPLEX_TYPE: return complex_type_class;
1722 case FUNCTION_TYPE: return function_type_class;
1723 case METHOD_TYPE: return method_type_class;
1724 case RECORD_TYPE: return record_type_class;
/* QUAL_UNION_TYPE classifies the same as a plain union.  */
1726 case QUAL_UNION_TYPE: return union_type_class;
/* Arrays of chars flagged as strings classify as string, else array.  */
1727 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1728 ? string_type_class : array_type_class);
1729 case LANG_TYPE: return lang_type_class;
1730 default: return no_type_class;
1734 /* Expand a call EXP to __builtin_classify_type. */
/* Returns the classification of the first argument's type as a
   CONST_INT; with no arguments, returns no_type_class.  */
1737 expand_builtin_classify_type (tree exp)
1739 if (call_expr_nargs (exp))
1740 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1741 return GEN_INT (no_type_class);
1744 /* This helper macro, meant to be used in mathfn_built_in below,
1745 determines which among a set of three builtin math functions is
1746 appropriate for a given type mode. The `F' and `L' cases are
1747 automatically generated from the `double' case. */
/* Expands to three case labels (double/float/long double variants) and
   sets fcode/fcodef/fcodel accordingly.  */
1748 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1749 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1750 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1751 fcodel = BUILT_IN_MATHFN##L ; break;
1752 /* Similar to above, but appends _R after any F/L suffix. */
1753 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1754 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1755 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1756 fcodel = BUILT_IN_MATHFN##L_R ; break;
1758 /* Return mathematic function equivalent to FN but operating directly
1759 on TYPE, if available. If IMPLICIT is true find the function in
1760 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1761 can't do the conversion, return zero. */
1764 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
/* Pick which decl table to consult based on IMPLICIT.  */
1766 tree const *const fn_arr
1767 = implicit ? implicit_built_in_decls : built_in_decls;
1768 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN expands to the double/float/long-double cases of one
   math builtin and records the three variant codes.  */
1772 CASE_MATHFN (BUILT_IN_ACOS)
1773 CASE_MATHFN (BUILT_IN_ACOSH)
1774 CASE_MATHFN (BUILT_IN_ASIN)
1775 CASE_MATHFN (BUILT_IN_ASINH)
1776 CASE_MATHFN (BUILT_IN_ATAN)
1777 CASE_MATHFN (BUILT_IN_ATAN2)
1778 CASE_MATHFN (BUILT_IN_ATANH)
1779 CASE_MATHFN (BUILT_IN_CBRT)
1780 CASE_MATHFN (BUILT_IN_CEIL)
1781 CASE_MATHFN (BUILT_IN_CEXPI)
1782 CASE_MATHFN (BUILT_IN_COPYSIGN)
1783 CASE_MATHFN (BUILT_IN_COS)
1784 CASE_MATHFN (BUILT_IN_COSH)
1785 CASE_MATHFN (BUILT_IN_DREM)
1786 CASE_MATHFN (BUILT_IN_ERF)
1787 CASE_MATHFN (BUILT_IN_ERFC)
1788 CASE_MATHFN (BUILT_IN_EXP)
1789 CASE_MATHFN (BUILT_IN_EXP10)
1790 CASE_MATHFN (BUILT_IN_EXP2)
1791 CASE_MATHFN (BUILT_IN_EXPM1)
1792 CASE_MATHFN (BUILT_IN_FABS)
1793 CASE_MATHFN (BUILT_IN_FDIM)
1794 CASE_MATHFN (BUILT_IN_FLOOR)
1795 CASE_MATHFN (BUILT_IN_FMA)
1796 CASE_MATHFN (BUILT_IN_FMAX)
1797 CASE_MATHFN (BUILT_IN_FMIN)
1798 CASE_MATHFN (BUILT_IN_FMOD)
1799 CASE_MATHFN (BUILT_IN_FREXP)
1800 CASE_MATHFN (BUILT_IN_GAMMA)
1801 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1802 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1803 CASE_MATHFN (BUILT_IN_HYPOT)
1804 CASE_MATHFN (BUILT_IN_ILOGB)
1805 CASE_MATHFN (BUILT_IN_INF)
1806 CASE_MATHFN (BUILT_IN_ISINF)
1807 CASE_MATHFN (BUILT_IN_J0)
1808 CASE_MATHFN (BUILT_IN_J1)
1809 CASE_MATHFN (BUILT_IN_JN)
1810 CASE_MATHFN (BUILT_IN_LCEIL)
1811 CASE_MATHFN (BUILT_IN_LDEXP)
1812 CASE_MATHFN (BUILT_IN_LFLOOR)
1813 CASE_MATHFN (BUILT_IN_LGAMMA)
1814 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1815 CASE_MATHFN (BUILT_IN_LLCEIL)
1816 CASE_MATHFN (BUILT_IN_LLFLOOR)
1817 CASE_MATHFN (BUILT_IN_LLRINT)
1818 CASE_MATHFN (BUILT_IN_LLROUND)
1819 CASE_MATHFN (BUILT_IN_LOG)
1820 CASE_MATHFN (BUILT_IN_LOG10)
1821 CASE_MATHFN (BUILT_IN_LOG1P)
1822 CASE_MATHFN (BUILT_IN_LOG2)
1823 CASE_MATHFN (BUILT_IN_LOGB)
1824 CASE_MATHFN (BUILT_IN_LRINT)
1825 CASE_MATHFN (BUILT_IN_LROUND)
1826 CASE_MATHFN (BUILT_IN_MODF)
1827 CASE_MATHFN (BUILT_IN_NAN)
1828 CASE_MATHFN (BUILT_IN_NANS)
1829 CASE_MATHFN (BUILT_IN_NEARBYINT)
1830 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1831 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1832 CASE_MATHFN (BUILT_IN_POW)
1833 CASE_MATHFN (BUILT_IN_POWI)
1834 CASE_MATHFN (BUILT_IN_POW10)
1835 CASE_MATHFN (BUILT_IN_REMAINDER)
1836 CASE_MATHFN (BUILT_IN_REMQUO)
1837 CASE_MATHFN (BUILT_IN_RINT)
1838 CASE_MATHFN (BUILT_IN_ROUND)
1839 CASE_MATHFN (BUILT_IN_SCALB)
1840 CASE_MATHFN (BUILT_IN_SCALBLN)
1841 CASE_MATHFN (BUILT_IN_SCALBN)
1842 CASE_MATHFN (BUILT_IN_SIGNBIT)
1843 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1844 CASE_MATHFN (BUILT_IN_SIN)
1845 CASE_MATHFN (BUILT_IN_SINCOS)
1846 CASE_MATHFN (BUILT_IN_SINH)
1847 CASE_MATHFN (BUILT_IN_SQRT)
1848 CASE_MATHFN (BUILT_IN_TAN)
1849 CASE_MATHFN (BUILT_IN_TANH)
1850 CASE_MATHFN (BUILT_IN_TGAMMA)
1851 CASE_MATHFN (BUILT_IN_TRUNC)
1852 CASE_MATHFN (BUILT_IN_Y0)
1853 CASE_MATHFN (BUILT_IN_Y1)
1854 CASE_MATHFN (BUILT_IN_YN)
/* Select the variant matching TYPE's main variant.  */
1860 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1861 return fn_arr[fcode];
1862 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1863 return fn_arr[fcodef];
1864 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1865 return fn_arr[fcodel];
1870 /* Like mathfn_built_in_1(), but always use the implicit array. */
/* Convenience wrapper: looks FN up for TYPE in implicit_built_in_decls.  */
1873 mathfn_built_in (tree type, enum built_in_function fn)
1875 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1878 /* If errno must be maintained, expand the RTL to check if the result,
1879 TARGET, of a built-in function call, EXP, is NaN, and if so set
1883 expand_errno_check (tree exp, rtx target)
1885 rtx lab = gen_label_rtx ();
1887 /* Test the result; if it is NaN, set errno=EDOM because
1888 the argument was not in the domain. */
/* x == x is false only for NaN; jump over the errno code otherwise.  */
1889 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1890 NULL_RTX, NULL_RTX, lab);
1893 /* If this built-in doesn't throw an exception, set errno directly. */
1894 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1896 #ifdef GEN_ERRNO_RTX
/* Target supplies the errno location directly.  */
1897 rtx errno_rtx = GEN_ERRNO_RTX;
/* Fallback: reference errno by its symbol name.  */
1900 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1902 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1908 /* Make sure the library call isn't expanded as a tail call. */
1909 CALL_EXPR_TAILCALL (exp) = 0;
1911 /* We can't set errno=EDOM directly; let the library call do it.
1912 Pop the arguments right away in case the call gets deleted. */
1914 expand_call (exp, target, 0);
1919 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1920 Return NULL_RTX if a normal call should be emitted rather than expanding
1921 the function in-line. EXP is the expression that is a call to the builtin
1922 function; if convenient, the result should be placed in TARGET.
1923 SUBTARGET may be used as the target for computing one of EXP's operands. */
1926 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1928 optab builtin_optab;
1929 rtx op0, insns, before_call;
1930 tree fndecl = get_callee_fndecl (exp);
1931 enum machine_mode mode;
1932 bool errno_set = false;
1935 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1938 arg = CALL_EXPR_ARG (exp, 0);
1940 switch (DECL_FUNCTION_CODE (fndecl))
1942 CASE_FLT_FN (BUILT_IN_SQRT):
1943 errno_set = ! tree_expr_nonnegative_p (arg);
1944 builtin_optab = sqrt_optab;
1946 CASE_FLT_FN (BUILT_IN_EXP):
1947 errno_set = true; builtin_optab = exp_optab; break;
1948 CASE_FLT_FN (BUILT_IN_EXP10):
1949 CASE_FLT_FN (BUILT_IN_POW10):
1950 errno_set = true; builtin_optab = exp10_optab; break;
1951 CASE_FLT_FN (BUILT_IN_EXP2):
1952 errno_set = true; builtin_optab = exp2_optab; break;
1953 CASE_FLT_FN (BUILT_IN_EXPM1):
1954 errno_set = true; builtin_optab = expm1_optab; break;
1955 CASE_FLT_FN (BUILT_IN_LOGB):
1956 errno_set = true; builtin_optab = logb_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOG):
1958 errno_set = true; builtin_optab = log_optab; break;
1959 CASE_FLT_FN (BUILT_IN_LOG10):
1960 errno_set = true; builtin_optab = log10_optab; break;
1961 CASE_FLT_FN (BUILT_IN_LOG2):
1962 errno_set = true; builtin_optab = log2_optab; break;
1963 CASE_FLT_FN (BUILT_IN_LOG1P):
1964 errno_set = true; builtin_optab = log1p_optab; break;
1965 CASE_FLT_FN (BUILT_IN_ASIN):
1966 builtin_optab = asin_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ACOS):
1968 builtin_optab = acos_optab; break;
1969 CASE_FLT_FN (BUILT_IN_TAN):
1970 builtin_optab = tan_optab; break;
1971 CASE_FLT_FN (BUILT_IN_ATAN):
1972 builtin_optab = atan_optab; break;
1973 CASE_FLT_FN (BUILT_IN_FLOOR):
1974 builtin_optab = floor_optab; break;
1975 CASE_FLT_FN (BUILT_IN_CEIL):
1976 builtin_optab = ceil_optab; break;
1977 CASE_FLT_FN (BUILT_IN_TRUNC):
1978 builtin_optab = btrunc_optab; break;
1979 CASE_FLT_FN (BUILT_IN_ROUND):
1980 builtin_optab = round_optab; break;
1981 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1982 builtin_optab = nearbyint_optab;
1983 if (flag_trapping_math)
1985 /* Else fallthrough and expand as rint. */
1986 CASE_FLT_FN (BUILT_IN_RINT):
1987 builtin_optab = rint_optab; break;
1988 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1989 builtin_optab = significand_optab; break;
1994 /* Make a suitable register to place result in. */
1995 mode = TYPE_MODE (TREE_TYPE (exp));
1997 if (! flag_errno_math || ! HONOR_NANS (mode))
2000 /* Before working hard, check whether the instruction is available. */
2001 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2003 target = gen_reg_rtx (mode);
2005 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2006 need to expand the argument again. This way, we will not perform
2007 side-effects more than once. */
2008 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2010 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2014 /* Compute into TARGET.
2015 Set TARGET to wherever the result comes back. */
2016 target = expand_unop (mode, builtin_optab, op0, target, 0);
2021 expand_errno_check (exp, target);
2023 /* Output the entire sequence. */
2024 insns = get_insns ();
2030 /* If we were unable to expand via the builtin, stop the sequence
2031 (without outputting the insns) and call to the library function
2032 with the stabilized argument list. */
2036 before_call = get_last_insn ();
2038 return expand_call (exp, target, target == const0_rtx);
2041 /* Expand a call to the builtin binary math functions (pow and atan2).
2042 Return NULL_RTX if a normal call should be emitted rather than expanding the
2043 function in-line. EXP is the expression that is a call to the builtin
2044 function; if convenient, the result should be placed in TARGET.
2045 SUBTARGET may be used as the target for computing one of EXP's
/* NOTE(review): this is a line-numbered listing with lines dropped during
   extraction (gaps in the embedded numbering: declarations, braces, case
   labels, default: gcc_unreachable(), returns).  Restore the missing lines
   from the original builtins.c before compiling.  */
2049 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2051 optab builtin_optab;
2052 rtx op0, op1, insns;
2053 int op1_type = REAL_TYPE;
2054 tree fndecl = get_callee_fndecl (exp);
2056 enum machine_mode mode;
2057 bool errno_set = true;
/* ldexp/scalbn/scalbln take an integer second argument; every other
   function handled here takes two floating-point arguments.  */
2059 switch (DECL_FUNCTION_CODE (fndecl))
2061 CASE_FLT_FN (BUILT_IN_SCALBN):
2062 CASE_FLT_FN (BUILT_IN_SCALBLN):
2063 CASE_FLT_FN (BUILT_IN_LDEXP):
2064 op1_type = INTEGER_TYPE;
2069 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2072 arg0 = CALL_EXPR_ARG (exp, 0);
2073 arg1 = CALL_EXPR_ARG (exp, 1);
/* Map the function code to the optab that expands it.  */
2075 switch (DECL_FUNCTION_CODE (fndecl))
2077 CASE_FLT_FN (BUILT_IN_POW):
2078 builtin_optab = pow_optab; break;
2079 CASE_FLT_FN (BUILT_IN_ATAN2):
2080 builtin_optab = atan2_optab; break;
2081 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb/scalbn/scalbln are only expandable when the mode's radix is 2.  */
2082 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2084 builtin_optab = scalb_optab; break;
2085 CASE_FLT_FN (BUILT_IN_SCALBN):
2086 CASE_FLT_FN (BUILT_IN_SCALBLN):
2087 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2089 /* Fall through... */
2090 CASE_FLT_FN (BUILT_IN_LDEXP):
2091 builtin_optab = ldexp_optab; break;
2092 CASE_FLT_FN (BUILT_IN_FMOD):
2093 builtin_optab = fmod_optab; break;
2094 CASE_FLT_FN (BUILT_IN_REMAINDER):
2095 CASE_FLT_FN (BUILT_IN_DREM):
2096 builtin_optab = remainder_optab; break;
2101 /* Make a suitable register to place result in. */
2102 mode = TYPE_MODE (TREE_TYPE (exp));
2104 /* Before working hard, check whether the instruction is available. */
2105 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2108 target = gen_reg_rtx (mode);
/* Errno handling can be skipped under -fno-math-errno or when the mode
   cannot represent NaNs.  */
2110 if (! flag_errno_math || ! HONOR_NANS (mode))
2113 /* Always stabilize the argument list. */
2114 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2115 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2117 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2118 op1 = expand_normal (arg1);
2122 /* Compute into TARGET.
2123 Set TARGET to wherever the result comes back. */
2124 target = expand_binop (mode, builtin_optab, op0, op1,
2125 target, 0, OPTAB_DIRECT);
2127 /* If we were unable to expand via the builtin, stop the sequence
2128 (without outputting the insns) and call to the library function
2129 with the stabilized argument list. */
2133 return expand_call (exp, target, target == const0_rtx);
2137 expand_errno_check (exp, target);
2139 /* Output the entire sequence. */
2140 insns = get_insns ();
2147 /* Expand a call to the builtin sin and cos math functions.
2148 Return NULL_RTX if a normal call should be emitted rather than expanding the
2149 function in-line. EXP is the expression that is a call to the builtin
2150 function; if convenient, the result should be placed in TARGET.
2151 SUBTARGET may be used as the target for computing one of EXP's
/* NOTE(review): line-numbered listing with lines dropped during extraction
   (gaps in the embedded numbering).  Restore from the original builtins.c
   before compiling.  */
2155 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2157 optab builtin_optab;
2159 tree fndecl = get_callee_fndecl (exp);
2160 enum machine_mode mode;
2163 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2166 arg = CALL_EXPR_ARG (exp, 0);
/* Both sin and cos start out targeting the combined sincos optab.  */
2168 switch (DECL_FUNCTION_CODE (fndecl))
2170 CASE_FLT_FN (BUILT_IN_SIN):
2171 CASE_FLT_FN (BUILT_IN_COS):
2172 builtin_optab = sincos_optab; break;
2177 /* Make a suitable register to place result in. */
2178 mode = TYPE_MODE (TREE_TYPE (exp));
2180 /* Check if sincos insn is available, otherwise fallback
2181 to sin or cos insn. */
2182 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2183 switch (DECL_FUNCTION_CODE (fndecl))
2185 CASE_FLT_FN (BUILT_IN_SIN):
2186 builtin_optab = sin_optab; break;
2187 CASE_FLT_FN (BUILT_IN_COS):
2188 builtin_optab = cos_optab; break;
2193 /* Before working hard, check whether the instruction is available. */
2194 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2196 target = gen_reg_rtx (mode);
2198 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2199 need to expand the argument again. This way, we will not perform
2200 side-effects more than once. */
2201 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2203 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2207 /* Compute into TARGET.
2208 Set TARGET to wherever the result comes back. */
/* For the two-output sincos optab, request only the output we need:
   sin goes in the second result slot, cos in the first.  */
2209 if (builtin_optab == sincos_optab)
2213 switch (DECL_FUNCTION_CODE (fndecl))
2215 CASE_FLT_FN (BUILT_IN_SIN):
2216 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2218 CASE_FLT_FN (BUILT_IN_COS):
2219 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2224 gcc_assert (result);
2228 target = expand_unop (mode, builtin_optab, op0, target, 0);
2233 /* Output the entire sequence. */
2234 insns = get_insns ();
2240 /* If we were unable to expand via the builtin, stop the sequence
2241 (without outputting the insns) and call to the library function
2242 with the stabilized argument list. */
2246 target = expand_call (exp, target, target == const0_rtx);
2251 /* Expand a call to one of the builtin math functions that operate on
2252 floating point argument and output an integer result (ilogb, isinf,
2254 Return 0 if a normal call should be emitted rather than expanding the
2255 function in-line. EXP is the expression that is a call to the builtin
2256 function; if convenient, the result should be placed in TARGET.
2257 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): line-numbered listing with lines dropped during extraction
   (gaps in the embedded numbering).  Restore from the original builtins.c
   before compiling.  */
2260 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2262 optab builtin_optab = 0;
2263 enum insn_code icode = CODE_FOR_nothing;
2265 tree fndecl = get_callee_fndecl (exp);
2266 enum machine_mode mode;
2267 bool errno_set = false;
2269 location_t loc = EXPR_LOCATION (exp);
2271 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2274 arg = CALL_EXPR_ARG (exp, 0);
2276 switch (DECL_FUNCTION_CODE (fndecl))
2278 CASE_FLT_FN (BUILT_IN_ILOGB):
2279 errno_set = true; builtin_optab = ilogb_optab; break;
2280 CASE_FLT_FN (BUILT_IN_ISINF):
2281 builtin_optab = isinf_optab; break;
2282 case BUILT_IN_ISNORMAL:
2283 case BUILT_IN_ISFINITE:
2284 CASE_FLT_FN (BUILT_IN_FINITE):
2285 /* These builtins have no optabs (yet). */
2291 /* There's no easy way to detect the case we need to set EDOM. */
2292 if (flag_errno_math && errno_set)
2295 /* Optab mode depends on the mode of the input argument. */
2296 mode = TYPE_MODE (TREE_TYPE (arg));
2299 icode = optab_handler (builtin_optab, mode)->insn_code;
2301 /* Before working hard, check whether the instruction is available. */
2302 if (icode != CODE_FOR_nothing)
2304 /* Make a suitable register to place result in. */
2306 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2307 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2309 gcc_assert (insn_data[icode].operand[0].predicate
2310 (target, GET_MODE (target)));
2312 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2313 need to expand the argument again. This way, we will not perform
2314 side-effects more than once. */
2315 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2317 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2319 if (mode != GET_MODE (op0))
2320 op0 = convert_to_mode (mode, op0, 0);
2322 /* Compute into TARGET.
2323 Set TARGET to wherever the result comes back. */
2324 emit_unop_insn (icode, target, op0, UNKNOWN)
2328 /* If there is no optab, try generic code. */
2329 switch (DECL_FUNCTION_CODE (fndecl))
2333 CASE_FLT_FN (BUILT_IN_ISINF):
2335 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2336 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2337 tree const type = TREE_TYPE (arg);
/* get_max_float writes the largest finite value of MODE as a hex-float
   string; it is parsed back into R for the comparison below.  */
2341 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2342 real_from_string (&r, buf);
2343 result = build_call_expr (isgr_fn, 2,
2344 fold_build1_loc (loc, ABS_EXPR, type, arg),
2345 build_real (type, r));
2346 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2348 CASE_FLT_FN (BUILT_IN_FINITE):
2349 case BUILT_IN_ISFINITE:
2351 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2352 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2353 tree const type = TREE_TYPE (arg);
2357 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2358 real_from_string (&r, buf);
2359 result = build_call_expr (isle_fn, 2,
2360 fold_build1_loc (loc, ABS_EXPR, type, arg),
2361 build_real (type, r));
2362 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2364 case BUILT_IN_ISNORMAL:
2366 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2367 islessequal(fabs(x),DBL_MAX). */
2368 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2369 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2370 tree const type = TREE_TYPE (arg);
2371 REAL_VALUE_TYPE rmax, rmin;
2374 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2375 real_from_string (&rmax, buf);
/* 0x1p(emin-1) is the smallest positive normal number of MODE.  */
2376 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2377 real_from_string (&rmin, buf);
2378 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
2379 result = build_call_expr (isle_fn, 2, arg,
2380 build_real (type, rmax));
2381 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2382 build_call_expr (isge_fn, 2, arg,
2383 build_real (type, rmin)));
2384 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2390 target = expand_call (exp, target, target == const0_rtx);
2395 /* Expand a call to the builtin sincos math function.
2396 Return NULL_RTX if a normal call should be emitted rather than expanding the
2397 function in-line. EXP is the expression that is a call to the builtin
/* NOTE(review): line-numbered listing with lines dropped during extraction
   (gaps in the embedded numbering).  Restore from the original builtins.c
   before compiling.  */
2401 expand_builtin_sincos (tree exp)
2403 rtx op0, op1, op2, target1, target2;
2404 enum machine_mode mode;
2405 tree arg, sinp, cosp;
2407 location_t loc = EXPR_LOCATION (exp);
/* sincos (x, *sinp, *cosp): one real input, two pointer outputs.  */
2409 if (!validate_arglist (exp, REAL_TYPE,
2410 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2413 arg = CALL_EXPR_ARG (exp, 0);
2414 sinp = CALL_EXPR_ARG (exp, 1);
2415 cosp = CALL_EXPR_ARG (exp, 2);
2417 /* Make a suitable register to place result in. */
2418 mode = TYPE_MODE (TREE_TYPE (arg));
2420 /* Check if sincos insn is available, otherwise emit the call. */
2421 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2424 target1 = gen_reg_rtx (mode);
2425 target2 = gen_reg_rtx (mode);
2427 op0 = expand_normal (arg);
/* OP1/OP2 are the memory locations *sinp and *cosp.  */
2428 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2429 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2431 /* Compute into target1 and target2.
2432 Set TARGET to wherever the result comes back. */
2433 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2434 gcc_assert (result);
2436 /* Move target1 and target2 to the memory locations indicated
2438 emit_move_insn (op1, target1);
2439 emit_move_insn (op2, target2);
2444 /* Expand a call to the internal cexpi builtin to the sincos math function.
2445 EXP is the expression that is a call to the builtin function; if convenient,
2446 the result should be placed in TARGET. SUBTARGET may be used as the target
2447 for computing one of EXP's operands. */
/* NOTE(review): line-numbered listing with lines dropped during extraction
   (gaps in the embedded numbering).  Restore from the original builtins.c
   before compiling.  */
2450 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2452 tree fndecl = get_callee_fndecl (exp);
2454 enum machine_mode mode;
2456 location_t loc = EXPR_LOCATION (exp);
2458 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2461 arg = CALL_EXPR_ARG (exp, 0);
2462 type = TREE_TYPE (arg);
2463 mode = TYPE_MODE (TREE_TYPE (arg));
2465 /* Try expanding via a sincos optab, fall back to emitting a libcall
2466 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2467 is only generated from sincos, cexp or if we have either of them. */
2468 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2470 op1 = gen_reg_rtx (mode);
2471 op2 = gen_reg_rtx (mode);
2473 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2475 /* Compute into op1 and op2. */
2476 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2478 else if (TARGET_HAS_SINCOS)
2480 tree call, fn = NULL_TREE;
/* Pick the sincos variant matching the cexpi variant's precision.  */
2484 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2485 fn = built_in_decls[BUILT_IN_SINCOSF];
2486 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2487 fn = built_in_decls[BUILT_IN_SINCOS];
2488 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2489 fn = built_in_decls[BUILT_IN_SINCOSL];
/* Build stack temporaries for sincos to write into, and trees for
   their addresses to pass as the pointer arguments.  */
2493 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2494 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2495 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2496 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2497 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2498 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2500 /* Make sure not to fold the sincos call again. */
2501 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2502 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2503 call, 3, arg, top1, top2));
2507 tree call, fn = NULL_TREE, narg;
2508 tree ctype = build_complex_type (type);
2510 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2511 fn = built_in_decls[BUILT_IN_CEXPF];
2512 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2513 fn = built_in_decls[BUILT_IN_CEXP];
2514 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2515 fn = built_in_decls[BUILT_IN_CEXPL];
2519 /* If we don't have a decl for cexp create one. This is the
2520 friendliest fallback if the user calls __builtin_cexpi
2521 without full target C99 function support. */
2522 if (fn == NULL_TREE)
2525 const char *name = NULL;
2527 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2529 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2531 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2534 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2535 fn = build_fn_decl (name, fntype);
/* cexpi(x) == cexp(0 + x*i): build the complex argument 0+xi.  */
2538 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2539 build_real (type, dconst0), arg);
2541 /* Make sure not to fold the cexp call again. */
2542 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2543 return expand_expr (build_call_nary (ctype, call, 1, narg),
2544 target, VOIDmode, EXPAND_NORMAL);
2547 /* Now build the proper return type. */
/* Result is cos(x) + sin(x)*i: OP2 holds cos, OP1 holds sin.  */
2548 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2549 make_tree (TREE_TYPE (arg), op2),
2550 make_tree (TREE_TYPE (arg), op1)),
2551 target, VOIDmode, EXPAND_NORMAL);
2554 /* Expand a call to one of the builtin rounding functions gcc defines
2555 as an extension (lfloor and lceil). As these are gcc extensions we
2556 do not need to worry about setting errno to EDOM.
2557 If expanding via optab fails, lower expression to (int)(floor(x)).
2558 EXP is the expression that is a call to the builtin function;
2559 if convenient, the result should be placed in TARGET. */
/* NOTE(review): line-numbered listing with lines dropped during extraction
   (gaps in the embedded numbering, including the name-selection strings in
   the switch below).  Restore from the original builtins.c before
   compiling.  */
2562 expand_builtin_int_roundingfn (tree exp, rtx target)
2564 convert_optab builtin_optab;
2565 rtx op0, insns, tmp;
2566 tree fndecl = get_callee_fndecl (exp);
2567 enum built_in_function fallback_fn;
2568 tree fallback_fndecl;
2569 enum machine_mode mode;
2572 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2575 arg = CALL_EXPR_ARG (exp, 0);
/* Select the conversion optab and the FP fallback used when the optab
   cannot expand.  */
2577 switch (DECL_FUNCTION_CODE (fndecl))
2579 CASE_FLT_FN (BUILT_IN_LCEIL):
2580 CASE_FLT_FN (BUILT_IN_LLCEIL):
2581 builtin_optab = lceil_optab;
2582 fallback_fn = BUILT_IN_CEIL;
2585 CASE_FLT_FN (BUILT_IN_LFLOOR):
2586 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2587 builtin_optab = lfloor_optab;
2588 fallback_fn = BUILT_IN_FLOOR;
2595 /* Make a suitable register to place result in. */
2596 mode = TYPE_MODE (TREE_TYPE (exp));
2598 target = gen_reg_rtx (mode);
2600 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2601 need to expand the argument again. This way, we will not perform
2602 side-effects more than once. */
2603 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2605 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2609 /* Compute into TARGET. */
2610 if (expand_sfix_optab (target, op0, builtin_optab))
2612 /* Output the entire sequence. */
2613 insns = get_insns ();
2619 /* If we were unable to expand via the builtin, stop the sequence
2620 (without outputting the insns). */
2623 /* Fall back to floating point rounding optab. */
2624 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2626 /* For non-C99 targets we may end up without a fallback fndecl here
2627 if the user called __builtin_lfloor directly. In this case emit
2628 a call to the floor/ceil variants nevertheless. This should result
2629 in the best user experience for not full C99 targets. */
2630 if (fallback_fndecl == NULL_TREE)
2633 const char *name = NULL;
2635 switch (DECL_FUNCTION_CODE (fndecl))
2637 case BUILT_IN_LCEIL:
2638 case BUILT_IN_LLCEIL:
2641 case BUILT_IN_LCEILF:
2642 case BUILT_IN_LLCEILF:
2645 case BUILT_IN_LCEILL:
2646 case BUILT_IN_LLCEILL:
2649 case BUILT_IN_LFLOOR:
2650 case BUILT_IN_LLFLOOR:
2653 case BUILT_IN_LFLOORF:
2654 case BUILT_IN_LLFLOORF:
2657 case BUILT_IN_LFLOORL:
2658 case BUILT_IN_LLFLOORL:
/* Build a bare decl for the libm rounding function by hand.  */
2665 fntype = build_function_type_list (TREE_TYPE (arg),
2666 TREE_TYPE (arg), NULL_TREE);
2667 fallback_fndecl = build_fn_decl (name, fntype);
2670 exp = build_call_expr (fallback_fndecl, 1, arg);
2672 tmp = expand_normal (exp);
2674 /* Truncate the result of floating point optab to integer
2675 via expand_fix (). */
2676 target = gen_reg_rtx (mode);
2677 expand_fix (target, tmp, 0);
2682 /* Expand a call to one of the builtin math functions doing integer
2684 Return 0 if a normal call should be emitted rather than expanding the
2685 function in-line. EXP is the expression that is a call to the builtin
2686 function; if convenient, the result should be placed in TARGET. */
/* NOTE(review): line-numbered listing with lines dropped during extraction
   (gaps in the embedded numbering).  Restore from the original builtins.c
   before compiling.  */
2689 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2691 convert_optab builtin_optab;
2693 tree fndecl = get_callee_fndecl (exp);
2695 enum machine_mode mode;
2697 /* There's no easy way to detect the case we need to set EDOM. */
2698 if (flag_errno_math)
2701 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2704 arg = CALL_EXPR_ARG (exp, 0);
2706 switch (DECL_FUNCTION_CODE (fndecl))
2708 CASE_FLT_FN (BUILT_IN_LRINT):
2709 CASE_FLT_FN (BUILT_IN_LLRINT):
2710 builtin_optab = lrint_optab; break;
2711 CASE_FLT_FN (BUILT_IN_LROUND):
2712 CASE_FLT_FN (BUILT_IN_LLROUND):
2713 builtin_optab = lround_optab; break;
2718 /* Make a suitable register to place result in. */
2719 mode = TYPE_MODE (TREE_TYPE (exp));
2721 target = gen_reg_rtx (mode);
2723 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2724 need to expand the argument again. This way, we will not perform
2725 side-effects more than once. */
2726 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2728 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2732 if (expand_sfix_optab (target, op0, builtin_optab))
2734 /* Output the entire sequence. */
2735 insns = get_insns ();
2741 /* If we were unable to expand via the builtin, stop the sequence
2742 (without outputting the insns) and call to the library function
2743 with the stabilized argument list. */
2746 target = expand_call (exp, target, target == const0_rtx);
2751 /* To evaluate powi(x,n), the floating point value x raised to the
2752 constant integer exponent n, we use a hybrid algorithm that
2753 combines the "window method" with look-up tables. For an
2754 introduction to exponentiation algorithms and "addition chains",
2755 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2756 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2757 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2758 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2760 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2761 multiplications to inline before calling the system library's pow
2762 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2763 so this default never requires calling pow, powf or powl. */
2765 #ifndef POWI_MAX_MULTS
2766 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2769 /* The size of the "optimal power tree" lookup table. All
2770 exponents less than this value are simply looked up in the
2771 powi_table below. This threshold is also used to size the
2772 cache of pseudo registers that hold intermediate results. */
2773 #define POWI_TABLE_SIZE 256
2775 /* The size, in bits of the window, used in the "window method"
2776 exponentiation algorithm. This is equivalent to a radix of
2777 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2778 #define POWI_WINDOW_SIZE 3
2780 /* The following table is an efficient representation of an
2781 "optimal power tree". For each value, i, the corresponding
2782 value, j, in the table states that an optimal evaluation
2783 sequence for calculating pow(x,i) can be found by evaluating
2784 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2785 100 integers is given in Knuth's "Seminumerical algorithms". */
/* NOTE(review): the listing dropped the table's enclosing braces (original
   lines 2788 and 2821) during extraction; restore them before compiling.  */
2787 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2789 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2790 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2791 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2792 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2793 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2794 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2795 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2796 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2797 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2798 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2799 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2800 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2801 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2802 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2803 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2804 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2805 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2806 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2807 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2808 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2809 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2810 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2811 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2812 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2813 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2814 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2815 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2816 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2817 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2818 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2819 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2820 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2824 /* Return the number of multiplications required to calculate
2825 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2826 subroutine of powi_cost. CACHE is an array indicating
2827 which exponents have already been calculated. */
/* NOTE(review): the listing dropped this function's braces, the cache
   check/update and base case (original lines 2831, 2834-2837, 2840);
   restore from the original builtins.c before compiling.  */
2830 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2832 /* If we've already calculated this exponent, then this evaluation
2833 doesn't require any additional multiplications. */
/* Recurse along the optimal power tree: cost(n) = cost(n-j) + cost(j) + 1
   for j = powi_table[n].  */
2838 return powi_lookup_cost (n - powi_table[n], cache)
2839 + powi_lookup_cost (powi_table[n], cache) + 1;
2842 /* Return the number of multiplications required to calculate
2843 powi(x,n) for an arbitrary x, given the exponent N. This
2844 function needs to be kept in sync with expand_powi below. */
/* NOTE(review): line-numbered listing with lines dropped during extraction
   (braces, n == 0 early return, result initialization); restore from the
   original builtins.c before compiling.  */
2847 powi_cost (HOST_WIDE_INT n)
2849 bool cache[POWI_TABLE_SIZE];
2850 unsigned HOST_WIDE_INT digit;
2851 unsigned HOST_WIDE_INT val;
2857 /* Ignore the reciprocal when calculating the cost. */
2858 val = (n < 0) ? -n : n;
2860 /* Initialize the exponent cache. */
2861 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel POWI_WINDOW_SIZE bits at a time until the
   remaining exponent fits in the lookup table.  */
2866 while (val >= POWI_TABLE_SIZE)
2870 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2871 result += powi_lookup_cost (digit, cache)
2872 + POWI_WINDOW_SIZE + 1;
2873 val >>= POWI_WINDOW_SIZE;
2882 return result + powi_lookup_cost (val, cache);
2885 /* Recursive subroutine of expand_powi. This function takes the array,
2886 CACHE, of already calculated exponents and an exponent N and returns
2887 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
/* NOTE(review): line-numbered listing with lines dropped during extraction
   (braces, the cache hit/store logic, the odd/even branch structure);
   restore from the original builtins.c before compiling.  */
2890 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2892 unsigned HOST_WIDE_INT digit;
/* Small exponents: split per the optimal power tree in powi_table.  */
2896 if (n < POWI_TABLE_SIZE)
2901 target = gen_reg_rtx (mode);
2904 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2905 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Large exponents: peel off the low POWI_WINDOW_SIZE bits...  */
2909 target = gen_reg_rtx (mode);
2910 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2911 op0 = expand_powi_1 (mode, n - digit, cache);
2912 op1 = expand_powi_1 (mode, digit, cache);
/* ...or square the half-exponent result when the window is empty.  */
2916 target = gen_reg_rtx (mode);
2917 op0 = expand_powi_1 (mode, n >> 1, cache);
2921 result = expand_mult (mode, op0, op1, target, 0);
2922 if (result != target)
2923 emit_move_insn (target, result);
2927 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2928 floating point operand in mode MODE, and N is the exponent. This
2929 function needs to be kept in sync with powi_cost above. */
/* NOTE(review): line-numbered listing with lines dropped during extraction
   (braces, the n == 0 test, cache[1] = x seeding, the n < 0 test, the
   final return); restore from the original builtins.c before compiling.  */
2932 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2934 unsigned HOST_WIDE_INT val;
2935 rtx cache[POWI_TABLE_SIZE];
/* powi(x,0) is 1.0 regardless of x.  */
2939 return CONST1_RTX (mode);
2941 val = (n < 0) ? -n : n;
2943 memset (cache, 0, sizeof (cache));
2946 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2948 /* If the original exponent was negative, reciprocate the result. */
2950 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2951 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2956 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2957 a normal call should be emitted rather than expanding the function
2958 in-line. EXP is the expression that is a call to the builtin
2959 function; if convenient, the result should be placed in TARGET. */
/* NOTE(review): line-numbered listing with lines dropped during extraction
   (gaps in the embedded numbering: braces, declarations, several condition
   continuations).  Restore from the original builtins.c before compiling.  */
2962 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2966 tree type = TREE_TYPE (exp);
2967 REAL_VALUE_TYPE cint, c, c2;
2970 enum machine_mode mode = TYPE_MODE (type);
2972 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2975 arg0 = CALL_EXPR_ARG (exp, 0);
2976 arg1 = CALL_EXPR_ARG (exp, 1);
/* Non-constant exponents go through the generic binary-mathfn path.  */
2978 if (TREE_CODE (arg1) != REAL_CST
2979 || TREE_OVERFLOW (arg1))
2980 return expand_builtin_mathfn_2 (exp, target, subtarget);
2982 /* Handle constant exponents. */
2984 /* For integer valued exponents we can expand to an optimal multiplication
2985 sequence using expand_powi. */
2986 c = TREE_REAL_CST (arg1);
2987 n = real_to_integer (&c);
2988 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* n in [-1, 2] is always cheap; larger |n| only when unsafe math
   optimizations allow it and the multiply count stays within budget.  */
2989 if (real_identical (&c, &cint)
2990 && ((n >= -1 && n <= 2)
2991 || (flag_unsafe_math_optimizations
2992 && optimize_insn_for_speed_p ()
2993 && powi_cost (n) <= POWI_MAX_MULTS)))
2995 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2998 op = force_reg (mode, op);
2999 op = expand_powi (op, mode, n);
3004 narg0 = builtin_save_expr (arg0);
3006 /* If the exponent is not integer valued, check if it is half of an integer.
3007 In this case we can expand to sqrt (x) * x**(n/2). */
3008 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3009 if (fn != NULL_TREE)
3011 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3012 n = real_to_integer (&c2);
3013 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3014 if (real_identical (&c2, &cint)
3015 && ((flag_unsafe_math_optimizations
3016 && optimize_insn_for_speed_p ()
3017 && powi_cost (n/2) <= POWI_MAX_MULTS)
3020 tree call_expr = build_call_expr (fn, 1, narg0);
3021 /* Use expand_expr in case the newly built call expression
3022 was folded to a non-call. */
3023 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3026 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3027 op2 = force_reg (mode, op2);
3028 op2 = expand_powi (op2, mode, abs (n / 2));
3029 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3030 0, OPTAB_LIB_WIDEN);
3031 /* If the original exponent was negative, reciprocate the
3034 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3035 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3041 /* Try if the exponent is a third of an integer. In this case
3042 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3043 different from pow (x, 1./3.) due to rounding and behavior
3044 with negative x we need to constrain this transformation to
3045 unsafe math and positive x or finite math. */
3046 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3048 && flag_unsafe_math_optimizations
3049 && (tree_expr_nonnegative_p (arg0)
3050 || !HONOR_NANS (mode)))
3052 REAL_VALUE_TYPE dconst3;
3053 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
/* Round 3*c to the nearest integer n, then verify c == n/3 exactly
   in this mode before using the cube-root decomposition.  */
3054 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3055 real_round (&c2, mode, &c2);
3056 n = real_to_integer (&c2);
3057 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3058 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3059 real_convert (&c2, mode, &c2);
3060 if (real_identical (&c2, &c)
3061 && ((optimize_insn_for_speed_p ()
3062 && powi_cost (n/3) <= POWI_MAX_MULTS)
3065 tree call_expr = build_call_expr (fn, 1,narg0);
3066 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
/* n % 3 == 2 needs cbrt(x) squared.  */
3067 if (abs (n) % 3 == 2)
3068 op = expand_simple_binop (mode, MULT, op, op, op,
3069 0, OPTAB_LIB_WIDEN);
3072 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3073 op2 = force_reg (mode, op2);
3074 op2 = expand_powi (op2, mode, abs (n / 3));
3075 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3076 0, OPTAB_LIB_WIDEN);
3077 /* If the original exponent was negative, reciprocate the
3080 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3081 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3087 /* Fall back to optab expansion. */
3088 return expand_builtin_mathfn_2 (exp, target, subtarget);
3091 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3092 a normal call should be emitted rather than expanding the function
3093 in-line. EXP is the expression that is a call to the builtin
3094 function; if convenient, the result should be placed in TARGET. */
/* NOTE(review): line-numbered listing with lines dropped during extraction
   (gaps in the embedded numbering).  Restore from the original builtins.c
   before compiling.  */
3097 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3101 enum machine_mode mode;
3102 enum machine_mode mode2;
3104 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3107 arg0 = CALL_EXPR_ARG (exp, 0);
3108 arg1 = CALL_EXPR_ARG (exp, 1);
3109 mode = TYPE_MODE (TREE_TYPE (exp));
3111 /* Handle constant power. */
3113 if (TREE_CODE (arg1) == INTEGER_CST
3114 && !TREE_OVERFLOW (arg1))
3116 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3118 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3119 Otherwise, check the number of multiplications required. */
/* The TREE_INT_CST_HIGH check ensures the exponent fits in a
   HOST_WIDE_INT (no truncation in N above).  */
3120 if ((TREE_INT_CST_HIGH (arg1) == 0
3121 || TREE_INT_CST_HIGH (arg1) == -1)
3122 && ((n >= -1 && n <= 2)
3123 || (optimize_insn_for_speed_p ()
3124 && powi_cost (n) <= POWI_MAX_MULTS)))
3126 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3127 op0 = force_reg (mode, op0);
3128 return expand_powi (op0, mode, n);
3132 /* Emit a libcall to libgcc. */
3134 /* Mode of the 2nd argument must match that of an int. */
3135 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3137 if (target == NULL_RTX)
3138 target = gen_reg_rtx (mode);
3140 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3141 if (GET_MODE (op0) != mode)
3142 op0 = convert_to_mode (mode, op0, 0);
3143 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3144 if (GET_MODE (op1) != mode2)
3145 op1 = convert_to_mode (mode2, op1, 0);
3147 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3148 target, LCT_CONST, mode, 2,
3149 op0, mode, op1, mode2);
3154 /* Expand expression EXP which is a call to the strlen builtin. Return
3155 NULL_RTX if we failed the caller should emit a normal call, otherwise
3156 try to get the result in TARGET, if convenient. */
/* NOTE(review): fragmentary extraction -- braces and several statements are
   missing; comments below describe only what the visible lines establish. */
3159 expand_builtin_strlen (tree exp, rtx target,
3160 enum machine_mode target_mode)
3162 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3168 tree src = CALL_EXPR_ARG (exp, 0);
3169 rtx result, src_reg, char_rtx, before_strlen;
3170 enum machine_mode insn_mode = target_mode, char_mode;
3171 enum insn_code icode = CODE_FOR_nothing;
3174 /* If the length can be computed at compile-time, return it. */
3175 len = c_strlen (src, 0);
3177 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3179 /* If the length can be computed at compile-time and is constant
3180 integer, but there are side-effects in src, evaluate
3181 src for side-effects, then return len.
3182 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3183 can be optimized into: i++; x = 3; */
3184 len = c_strlen (src, 1);
3185 if (len && TREE_CODE (len) == INTEGER_CST)
/* Expanding into const0_rtx discards the value but keeps the side effects.  */
3187 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3188 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3191 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3193 /* If SRC is not a pointer type, don't do this operation inline. */
3197 /* Bail out if we can't compute strlen in the right mode. */
/* Walk successively wider integer modes until a strlen pattern is found.  */
3198 while (insn_mode != VOIDmode)
3200 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3201 if (icode != CODE_FOR_nothing)
3204 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3206 if (insn_mode == VOIDmode)
3209 /* Make a place to write the result of the instruction. */
3213 && GET_MODE (result) == insn_mode
3214 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3215 result = gen_reg_rtx (insn_mode);
3217 /* Make a place to hold the source address. We will not expand
3218 the actual source until we are sure that the expansion will
3219 not fail -- there are trees that cannot be expanded twice. */
3220 src_reg = gen_reg_rtx (Pmode);
3222 /* Mark the beginning of the strlen sequence so we can emit the
3223 source operand later. */
3224 before_strlen = get_last_insn ();
/* Operand 2 of the strlen pattern is the (zero) terminator character;
   force it into a register if the pattern's predicate rejects const0_rtx.  */
3226 char_rtx = const0_rtx;
3227 char_mode = insn_data[(int) icode].operand[2].mode;
3228 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3230 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3232 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3233 char_rtx, GEN_INT (align))
3238 /* Now that we are assured of success, expand the source. */
3240 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3242 emit_move_insn (src_reg, pat);
/* Emit the source-address computation before the strlen sequence recorded
   above (or at the start of the function if there were no prior insns).  */
3247 emit_insn_after (pat, before_strlen);
3249 emit_insn_before (pat, get_insns ());
3251 /* Return the value in the proper mode for this function. */
3252 if (GET_MODE (result) == target_mode)
3254 else if (target != 0)
3255 convert_move (target, result, 0);
3257 target = convert_to_mode (target_mode, result, 0)
3263 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3264 caller should emit a normal call, otherwise try to get the result
3265 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Thin wrapper: expansion succeeds only when the front end can fold the
   call to a simpler tree (fold_builtin_strstr); no strstr optab is tried. */
3268 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3270 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3272 tree type = TREE_TYPE (exp);
3273 tree result = fold_builtin_strstr (EXPR_LOCATION (exp),
3274 CALL_EXPR_ARG (exp, 0),
3275 CALL_EXPR_ARG (exp, 1), type);
3277 return expand_expr (result, target, mode, EXPAND_NORMAL)
3282 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3283 caller should emit a normal call, otherwise try to get the result
3284 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Thin wrapper over fold_builtin_strchr; expands the folded result if any. */
3287 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3289 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3291 tree type = TREE_TYPE (exp);
3292 tree result = fold_builtin_strchr (EXPR_LOCATION (exp),
3293 CALL_EXPR_ARG (exp, 0),
3294 CALL_EXPR_ARG (exp, 1), type);
3296 return expand_expr (result, target, mode, EXPAND_NORMAL);
3298 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3303 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3304 caller should emit a normal call, otherwise try to get the result
3305 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Thin wrapper over fold_builtin_strrchr; expands the folded result if any. */
3308 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3310 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3312 tree type = TREE_TYPE (exp);
3313 tree result = fold_builtin_strrchr (EXPR_LOCATION (exp),
3314 CALL_EXPR_ARG (exp, 0),
3315 CALL_EXPR_ARG (exp, 1), type);
3317 return expand_expr (result, target, mode, EXPAND_NORMAL)
3322 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3323 caller should emit a normal call, otherwise try to get the result
3324 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Thin wrapper over fold_builtin_strpbrk; expands the folded result if any. */
3327 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3329 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3331 tree type = TREE_TYPE (exp);
3332 tree result = fold_builtin_strpbrk (EXPR_LOCATION (exp),
3333 CALL_EXPR_ARG (exp, 0),
3334 CALL_EXPR_ARG (exp, 1), type);
3336 return expand_expr (result, target, mode, EXPAND_NORMAL)
3341 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3342 bytes from constant string DATA + OFFSET and return it as target
   constant.  DATA is the NUL-terminated source string; the assert below
   guarantees the read stays within the string plus its terminator.  */
3346 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3347 enum machine_mode mode)
3349 const char *str = (const char *) data;
3351 gcc_assert (offset >= 0
3352 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3353 <= strlen (str) + 1));
3355 return c_readstr (str + offset, mode)
3358 /* Expand a call EXP to the memcpy builtin.
3359 Return NULL_RTX if we failed, the caller should emit a normal call,
3360 otherwise try to get the result in TARGET, if convenient (and in
3361 mode MODE if that's convenient). */
/* NOTE(review): fragmentary extraction -- braces, some conditions and
   returns are missing from this view.  */
3364 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3366 tree fndecl = get_callee_fndecl (exp);
3368 if (!validate_arglist (exp,
3369 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3373 tree dest = CALL_EXPR_ARG (exp, 0);
3374 tree src = CALL_EXPR_ARG (exp, 1);
3375 tree len = CALL_EXPR_ARG (exp, 2);
3376 const char *src_str;
3377 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3378 unsigned int dest_align
3379 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3380 rtx dest_mem, src_mem, dest_addr, len_rtx;
/* First try to fold the whole call to a simpler tree.  */
3381 tree result = fold_builtin_memory_op (EXPR_LOCATION (exp),
3383 TREE_TYPE (TREE_TYPE (fndecl)),
3385 HOST_WIDE_INT expected_size = -1;
3386 unsigned int expected_align = 0;
3387 tree_ann_common_t ann;
/* A folded COMPOUND_EXPR may carry side effects in its left operand;
   evaluate those for effect, then expand the final value.  */
3391 while (TREE_CODE (result) == COMPOUND_EXPR)
3393 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3395 result = TREE_OPERAND (result, 1);
3397 return expand_expr (result, target, mode, EXPAND_NORMAL);
3400 /* If DEST is not a pointer type, call the normal function. */
3401 if (dest_align == 0)
3404 /* If either SRC is not a pointer type, don't do this
3405 operation in-line. */
/* Profile feedback (if present) suggests an expected block size/alignment.  */
3409 ann = tree_common_ann (exp);
3411 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3413 if (expected_align < dest_align)
3414 expected_align = dest_align;
3415 dest_mem = get_memory_rtx (dest, len);
3416 set_mem_align (dest_mem, dest_align);
3417 len_rtx = expand_normal (len);
3418 src_str = c_getstr (src);
3420 /* If SRC is a string constant and block move would be done
3421 by pieces, we can avoid loading the string from memory
3422 and only stored the computed constants. */
3424 && CONST_INT_P (len_rtx)
3425 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3426 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3427 CONST_CAST (char *, src_str),
3430 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3431 builtin_memcpy_read_str,
3432 CONST_CAST (char *, src_str),
3433 dest_align, false, 0);
3434 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3435 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3439 src_mem = get_memory_rtx (src, len);
3440 set_mem_align (src_mem, src_align);
3442 /* Copy word part most expediently. */
3443 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3444 CALL_EXPR_TAILCALL (exp)
3445 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3446 expected_align, expected_size);
/* memcpy returns DEST; materialize its address in ptr_mode.  */
3450 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3451 dest_addr = convert_memory_address (ptr_mode, dest_addr)
3457 /* Expand a call EXP to the mempcpy builtin.
3458 Return NULL_RTX if we failed; the caller should emit a normal call,
3459 otherwise try to get the result in TARGET, if convenient (and in
3460 mode MODE if that's convenient). If ENDP is 0 return the
3461 destination pointer, if ENDP is 1 return the end pointer ala
3462 mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy (presumably -- continuation line missing in this extraction). */
3466 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3468 if (!validate_arglist (exp,
3469 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3473 tree dest = CALL_EXPR_ARG (exp, 0);
3474 tree src = CALL_EXPR_ARG (exp, 1);
3475 tree len = CALL_EXPR_ARG (exp, 2);
/* mempcpy proper always wants the end pointer: endp == 1.  */
3476 return expand_builtin_mempcpy_args (dest, src, len,
3478 target, mode, /*endp=*/ 1)
3482 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3483 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3484 so that this can also be called without constructing an actual CALL_EXPR.
3485 TYPE is the return type of the call. The other arguments and return value
3486 are the same as for expand_builtin_mempcpy. */
/* NOTE(review): fragmentary extraction -- braces and some conditions are
   missing from this view.  */
3489 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3490 rtx target, enum machine_mode mode, int endp)
3492 /* If return value is ignored, transform mempcpy into memcpy. */
3493 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3495 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3496 tree result = build_call_expr (fn, 3, dest, src, len);
/* Evaluate side-effect operands of a folded COMPOUND_EXPR, then expand
   the final value.  */
3498 while (TREE_CODE (result) == COMPOUND_EXPR)
3500 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3502 result = TREE_OPERAND (result, 1);
3504 return expand_expr (result, target, mode, EXPAND_NORMAL);
3508 const char *src_str;
3509 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3510 unsigned int dest_align
3511 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3512 rtx dest_mem, src_mem, len_rtx;
3513 tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
3514 dest, src, len, type, false, endp);
3518 while (TREE_CODE (result) == COMPOUND_EXPR)
3520 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3522 result = TREE_OPERAND (result, 1);
3524 return expand_expr (result, target, mode, EXPAND_NORMAL);
3527 /* If either SRC or DEST is not a pointer type, don't do this
3528 operation in-line. */
3529 if (dest_align == 0 || src_align == 0)
3532 /* If LEN is not constant, call the normal function. */
3533 if (! host_integerp (len, 1))
3536 len_rtx = expand_normal (len);
3537 src_str = c_getstr (src);
3539 /* If SRC is a string constant and block move would be done
3540 by pieces, we can avoid loading the string from memory
3541 and only stored the computed constants. */
3543 && CONST_INT_P (len_rtx)
3544 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3545 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3546 CONST_CAST (char *, src_str),
3549 dest_mem = get_memory_rtx (dest, len);
3550 set_mem_align (dest_mem, dest_align);
/* ENDP is forwarded so store_by_pieces returns the requested pointer
   (dest, end, or end-1).  */
3551 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3552 builtin_memcpy_read_str,
3553 CONST_CAST (char *, src_str),
3554 dest_align, false, endp);
3555 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3556 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise try a piecewise move when the constant length permits it.  */
3560 if (CONST_INT_P (len_rtx)
3561 && can_move_by_pieces (INTVAL (len_rtx),
3562 MIN (dest_align, src_align)))
3564 dest_mem = get_memory_rtx (dest, len);
3565 set_mem_align (dest_mem, dest_align);
3566 src_mem = get_memory_rtx (src, len);
3567 set_mem_align (src_mem, src_align);
3568 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3569 MIN (dest_align, src_align), endp);
3570 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3571 dest_mem = convert_memory_address (ptr_mode, dest_mem)
3579 /* Expand expression EXP, which is a call to the memmove builtin. Return
3580 NULL_RTX if we failed; the caller should emit a normal call. */
/* Argument-unpacking wrapper around expand_builtin_memmove_args.  */
3583 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3585 if (!validate_arglist (exp,
3586 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3590 tree dest = CALL_EXPR_ARG (exp, 0);
3591 tree src = CALL_EXPR_ARG (exp, 1);
3592 tree len = CALL_EXPR_ARG (exp, 2);
3593 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3594 target, mode, ignore)
3598 /* Helper function to do the actual work for expand_builtin_memmove. The
3599 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3600 so that this can also be called without constructing an actual CALL_EXPR.
3601 TYPE is the return type of the call. The other arguments and return value
3602 are the same as for expand_builtin_memmove. */
3605 expand_builtin_memmove_args (tree dest, tree src, tree len,
3606 tree type, rtx target, enum machine_mode mode,
/* endp == 3 encodes a memmove-style (possibly overlapping) copy to
   fold_builtin_memory_op.  */
3609 tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
3610 dest, src, len, type, ignore, /*endp=*/3);
3614 STRIP_TYPE_NOPS (result);
/* Evaluate side-effect operands of a folded COMPOUND_EXPR, then expand
   the final value.  */
3615 while (TREE_CODE (result) == COMPOUND_EXPR)
3617 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3619 result = TREE_OPERAND (result, 1);
3621 return expand_expr (result, target, mode, EXPAND_NORMAL);
3624 /* Otherwise, call the normal function. */
3628 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3629 NULL_RTX if we failed the caller should emit a normal call. */
3632 expand_builtin_bcopy (tree exp, int ignore)
3634 tree type = TREE_TYPE (exp);
3635 tree src, dest, size;
3636 location_t loc = EXPR_LOCATION (exp);
3638 if (!validate_arglist (exp,
3639 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* Note the argument order: bcopy is (src, dest, n).  */
3642 src = CALL_EXPR_ARG (exp, 0);
3643 dest = CALL_EXPR_ARG (exp, 1);
3644 size = CALL_EXPR_ARG (exp, 2);
3646 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3647 This is done this way so that if it isn't expanded inline, we fall
3648 back to calling bcopy instead of memmove. */
/* const0_rtx as target: bcopy's return value is always ignored.  */
3649 return expand_builtin_memmove_args (dest, src,
3650 fold_convert_loc (loc, sizetype, size),
3651 type, const0_rtx, VOIDmode,
/* Fallbacks for targets without a movstr pattern (the guarding #ifndef is
   not visible in this extraction).  */
3656 # define HAVE_movstr 0
3657 # define CODE_FOR_movstr CODE_FOR_nothing
3660 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3661 we failed, the caller should emit a normal call, otherwise try to
3662 get the result in TARGET, if convenient. If ENDP is 0 return the
3663 destination pointer, if ENDP is 1 return the end pointer ala
3664 mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy (presumably -- continuation line missing in this extraction). */
3668 expand_movstr (tree dest, tree src, rtx target, int endp)
3674 const struct insn_data * data;
3679 dest_mem = get_memory_rtx (dest, NULL);
3680 src_mem = get_memory_rtx (src, NULL);
3683 target = force_reg (Pmode, XEXP (dest_mem, 0));
3684 dest_mem = replace_equiv_address (dest_mem, target);
3685 end = gen_reg_rtx (Pmode);
3689 if (target == 0 || target == const0_rtx)
3691 end = gen_reg_rtx (Pmode);
3699 data = insn_data + CODE_FOR_movstr;
/* Adapt END to the mode the movstr pattern's operand 0 expects.  */
3701 if (data->operand[0].mode != VOIDmode)
3702 end = gen_lowpart (data->operand[0].mode, end);
3704 insn = data->genfun (end, dest_mem, src_mem);
3710 /* movstr is supposed to set end to the address of the NUL
3711 terminator. If the caller requested a mempcpy-like return value,
3713 if (endp == 1 && target != const0_rtx)
/* mempcpy returns one past the NUL, hence the +1 adjustment.  */
3715 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3716 emit_move_insn (target, force_operand (tem, NULL_RTX))
3722 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3723 NULL_RTX if we failed the caller should emit a normal call, otherwise
3724 try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient -- continuation line missing in this extraction). */
/* Argument-unpacking wrapper around expand_builtin_strcpy_args.  */
3728 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3730 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3732 tree dest = CALL_EXPR_ARG (exp, 0);
3733 tree src = CALL_EXPR_ARG (exp, 1);
3734 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode)
3739 /* Helper function to do the actual work for expand_builtin_strcpy. The
3740 arguments to the builtin_strcpy call DEST and SRC are broken out
3741 so that this can also be called without constructing an actual CALL_EXPR.
3742 The other arguments and return value are the same as for
3743 expand_builtin_strcpy. */
3746 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3747 rtx target, enum machine_mode mode)
/* Try folding first; fall back to a movstr-pattern expansion returning
   the destination pointer (endp == 0, strcpy semantics).  */
3749 tree result = fold_builtin_strcpy (UNKNOWN_LOCATION,
3750 fndecl, dest, src, 0);
3752 return expand_expr (result, target, mode, EXPAND_NORMAL);
3753 return expand_movstr (dest, src, target, /*endp=*/0)
3757 /* Expand a call EXP to the stpcpy builtin.
3758 Return NULL_RTX if we failed the caller should emit a normal call,
3759 otherwise try to get the result in TARGET, if convenient (and in
3760 mode MODE if that's convenient). */
/* NOTE(review): fragmentary extraction -- braces and some statements are
   missing from this view.  */
3763 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3766 location_t loc = EXPR_LOCATION (exp);
3768 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3771 dst = CALL_EXPR_ARG (exp, 0);
3772 src = CALL_EXPR_ARG (exp, 1);
3774 /* If return value is ignored, transform stpcpy into strcpy. */
3775 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3777 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3778 tree result = build_call_expr (fn, 2, dst, src);
3780 STRIP_NOPS (result);
/* Evaluate side-effect operands of a folded COMPOUND_EXPR, then expand
   the final value.  */
3781 while (TREE_CODE (result) == COMPOUND_EXPR)
3783 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3785 result = TREE_OPERAND (result, 1);
3787 return expand_expr (result, target, mode, EXPAND_NORMAL);
3794 /* Ensure we get an actual string whose length can be evaluated at
3795 compile-time, not an expression containing a string. This is
3796 because the latter will potentially produce pessimized code
3797 when used to produce the return value. */
3798 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3799 return expand_movstr (dst, src, target, /*endp=*/2);
/* Known length: copy len+1 bytes (including NUL) via mempcpy machinery,
   asking for the end-pointer-minus-one, i.e. stpcpy's return value.  */
3801 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3802 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3803 target, mode, /*endp=*/2);
3808 if (TREE_CODE (len) == INTEGER_CST)
3810 rtx len_rtx = expand_normal (len);
3812 if (CONST_INT_P (len_rtx))
/* mempcpy failed but the length is a literal constant: expand as a
   plain strcpy and compute the return value as DST + LEN.  */
3814 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3815 dst, src, target, mode);
3821 if (mode != VOIDmode)
3822 target = gen_reg_rtx (mode);
3824 target = gen_reg_rtx (GET_MODE (ret));
3826 if (GET_MODE (target) != GET_MODE (ret))
3827 ret = gen_lowpart (GET_MODE (target), ret);
3829 ret = plus_constant (ret, INTVAL (len_rtx));
3830 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3838 return expand_movstr (dst, src, target, /*endp=*/2)
3842 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3843 bytes from constant string DATA + OFFSET and return it as target
   constant.  Unlike builtin_memcpy_read_str, reads past the end of the
   string yield the strncpy-mandated zero padding (return in the elided
   branch -- presumably const0_rtx; line missing in this extraction). */
3847 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3848 enum machine_mode mode)
3850 const char *str = (const char *) data;
3852 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3855 return c_readstr (str + offset, mode)
3858 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3859 NULL_RTX if we failed the caller should emit a normal call. */
/* NOTE(review): fragmentary extraction -- braces and some statements are
   missing from this view.  */
3862 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3864 tree fndecl = get_callee_fndecl (exp);
3865 location_t loc = EXPR_LOCATION (exp);
3867 if (validate_arglist (exp,
3868 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3870 tree dest = CALL_EXPR_ARG (exp, 0);
3871 tree src = CALL_EXPR_ARG (exp, 1);
3872 tree len = CALL_EXPR_ARG (exp, 2);
/* SLEN is the source length if it can be computed at compile time.  */
3873 tree slen = c_strlen (src, 1);
3874 tree result = fold_builtin_strncpy (EXPR_LOCATION (exp),
3875 fndecl, dest, src, len, slen);
/* Evaluate side-effect operands of a folded COMPOUND_EXPR, then expand
   the final value.  */
3879 while (TREE_CODE (result) == COMPOUND_EXPR)
3881 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3883 result = TREE_OPERAND (result, 1);
3885 return expand_expr (result, target, mode, EXPAND_NORMAL);
3888 /* We must be passed a constant len and src parameter. */
3889 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
/* SLEN becomes strlen(src) + 1, the number of bytes the source provides.  */
3892 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3894 /* We're required to pad with trailing zeros if the requested
3895 len is greater than strlen(s2)+1. In that case try to
3896 use store_by_pieces, if it fails, punt. */
3897 if (tree_int_cst_lt (slen, len))
3899 unsigned int dest_align
3900 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3901 const char *p = c_getstr (src);
3904 if (!p || dest_align == 0 || !host_integerp (len, 1)
3905 || !can_store_by_pieces (tree_low_cst (len, 1),
3906 builtin_strncpy_read_str,
3907 CONST_CAST (char *, p),
/* builtin_strncpy_read_str supplies the zero padding beyond the string.  */
3911 dest_mem = get_memory_rtx (dest, len);
3912 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3913 builtin_strncpy_read_str,
3914 CONST_CAST (char *, p), dest_align, false, 0);
3915 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3916 dest_mem = convert_memory_address (ptr_mode, dest_mem)
3923 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3924 bytes from constant string DATA + OFFSET and return it as target
   constant.  Here DATA points to a single fill byte, so OFFSET is
   irrelevant: every mode-sized chunk is that byte repeated.  */
3928 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3929 enum machine_mode mode)
3931 const char *c = (const char *) data;
3932 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3934 memset (p, *c, GET_MODE_SIZE (mode));
3936 return c_readstr (p, mode)
3939 /* Callback routine for store_by_pieces. Return the RTL of a register
3940 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3941 char value given in the RTL register data. For example, if mode is
3942 4 bytes wide, return the RTL for 0x01010101*data. */
3945 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3946 enum machine_mode mode)
3952 size = GET_MODE_SIZE (mode);
/* Build the 0x0101...01 replication coefficient for MODE, then multiply
   the byte value by it to broadcast the byte into every lane.  */
3956 p = XALLOCAVEC (char, size);
3957 memset (p, 1, size);
3958 coeff = c_readstr (p, mode);
3960 target = convert_to_mode (mode, (rtx) data, 1);
3961 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3962 return force_reg (mode, target)
3965 /* Expand expression EXP, which is a call to the memset builtin. Return
3966 NULL_RTX if we failed the caller should emit a normal call, otherwise
3967 try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient -- continuation line missing in this extraction). */
/* Argument-unpacking wrapper around expand_builtin_memset_args.  */
3971 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3973 if (!validate_arglist (exp,
3974 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3978 tree dest = CALL_EXPR_ARG (exp, 0);
3979 tree val = CALL_EXPR_ARG (exp, 1);
3980 tree len = CALL_EXPR_ARG (exp, 2);
3981 return expand_builtin_memset_args (dest, val, len, target, mode, exp)
3985 /* Helper function to do the actual work for expand_builtin_memset. The
3986 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3987 so that this can also be called without constructing an actual CALL_EXPR.
3988 The other arguments and return value are the same as for
3989 expand_builtin_memset. */
/* NOTE(review): fragmentary extraction -- braces, labels and some
   statements are missing from this view.  */
3992 expand_builtin_memset_args (tree dest, tree val, tree len,
3993 rtx target, enum machine_mode mode, tree orig_exp)
3996 enum built_in_function fcode;
3998 unsigned int dest_align;
3999 rtx dest_mem, dest_addr, len_rtx;
4000 HOST_WIDE_INT expected_size = -1;
4001 unsigned int expected_align = 0;
4002 tree_ann_common_t ann;
4004 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
4006 /* If DEST is not a pointer type, don't do this operation in-line. */
4007 if (dest_align == 0)
/* Profile feedback (if present) suggests an expected block size/alignment.  */
4010 ann = tree_common_ann (orig_exp);
4012 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
4014 if (expected_align < dest_align)
4015 expected_align = dest_align;
4017 /* If the LEN parameter is zero, return DEST. */
4018 if (integer_zerop (len))
4020 /* Evaluate and ignore VAL in case it has side-effects. */
4021 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4022 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4025 /* Stabilize the arguments in case we fail. */
4026 dest = builtin_save_expr (dest);
4027 val = builtin_save_expr (val);
4028 len = builtin_save_expr (len);
4030 len_rtx = expand_normal (len);
4031 dest_mem = get_memory_rtx (dest, len);
/* Non-constant fill value: broadcast it at run time.  */
4033 if (TREE_CODE (val) != INTEGER_CST)
4037 val_rtx = expand_normal (val);
4038 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4041 /* Assume that we can memset by pieces if we can store
4042 * the coefficients by pieces (in the required modes).
4043 * We can't pass builtin_memset_gen_str as that emits RTL. */
4045 if (host_integerp (len, 1)
4046 && can_store_by_pieces (tree_low_cst (len, 1),
4047 builtin_memset_read_str, &c, dest_align,
4050 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4052 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4053 builtin_memset_gen_str, val_rtx, dest_align,
4056 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4057 dest_align, expected_align,
4061 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4062 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant fill value: reduce it to a single target char C; failure
   (value doesn't fit a char) falls back to the library call.  */
4066 if (target_char_cast (val, &c))
4071 if (host_integerp (len, 1)
4072 && can_store_by_pieces (tree_low_cst (len, 1),
4073 builtin_memset_read_str, &c, dest_align,
4075 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4076 builtin_memset_read_str, &c, dest_align, true, 0);
4077 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4078 dest_align, expected_align,
4082 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4083 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Zero fill: use the block-clear machinery, tail-calling if the
   original call was a tail call.  */
4087 set_mem_align (dest_mem, dest_align);
4088 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4089 CALL_EXPR_TAILCALL (orig_exp)
4090 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4091 expected_align, expected_size);
4095 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4096 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* All inline strategies failed: rebuild and emit the original call
   (memset or bzero, depending on what the user wrote).  */
4102 fndecl = get_callee_fndecl (orig_exp);
4103 fcode = DECL_FUNCTION_CODE (fndecl);
4104 if (fcode == BUILT_IN_MEMSET)
4105 fn = build_call_expr (fndecl, 3, dest, val, len);
4106 else if (fcode == BUILT_IN_BZERO)
4107 fn = build_call_expr (fndecl, 2, dest, len);
4110 if (TREE_CODE (fn) == CALL_EXPR)
4111 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4112 return expand_call (fn, target, target == const0_rtx)
4115 /* Expand expression EXP, which is a call to the bzero builtin. Return
4116 NULL_RTX if we failed the caller should emit a normal call. */
4119 expand_builtin_bzero (tree exp)
4122 location_t loc = EXPR_LOCATION (exp);
4124 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4127 dest = CALL_EXPR_ARG (exp, 0);
4128 size = CALL_EXPR_ARG (exp, 1);
4130 /* New argument list transforming bzero(ptr x, int y) to
4131 memset(ptr x, int 0, size_t y). This is done this way
4132 so that if it isn't expanded inline, we fallback to
4133 calling bzero instead of memset. */
/* const0_rtx as target: bzero's return value is always ignored.  */
4135 return expand_builtin_memset_args (dest, integer_zero_node,
4136 fold_convert_loc (loc, sizetype, size),
4137 const0_rtx, VOIDmode, exp)
4140 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4141 caller should emit a normal call, otherwise try to get the result
4142 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Thin wrapper over fold_builtin_memchr; expands the folded result if any. */
4145 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4147 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4148 INTEGER_TYPE, VOID_TYPE))
4150 tree type = TREE_TYPE (exp);
4151 tree result = fold_builtin_memchr (EXPR_LOCATION (exp),
4152 CALL_EXPR_ARG (exp, 0),
4153 CALL_EXPR_ARG (exp, 1),
4154 CALL_EXPR_ARG (exp, 2), type);
4156 return expand_expr (result, target, mode, EXPAND_NORMAL)
4161 /* Expand expression EXP, which is a call to the memcmp built-in function.
4162 Return NULL_RTX if we failed and the
4163 caller should emit a normal call, otherwise try to get the result in
4164 TARGET, if convenient (and in mode MODE, if that's convenient). */
/* NOTE(review): fragmentary extraction -- braces and some statements are
   missing from this view.  */
4167 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4169 location_t loc = EXPR_LOCATION (exp);
4171 if (!validate_arglist (exp,
4172 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* First try to fold the comparison at compile time.  */
4176 tree result = fold_builtin_memcmp (loc,
4177 CALL_EXPR_ARG (exp, 0),
4178 CALL_EXPR_ARG (exp, 1),
4179 CALL_EXPR_ARG (exp, 2));
4181 return expand_expr (result, target, mode, EXPAND_NORMAL);
4184 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4186 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4189 tree arg1 = CALL_EXPR_ARG (exp, 0);
4190 tree arg2 = CALL_EXPR_ARG (exp, 1);
4191 tree len = CALL_EXPR_ARG (exp, 2);
4194 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4196 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4197 enum machine_mode insn_mode;
/* Prefer the cmpmem pattern; fall back to cmpstrn when only that exists.  */
4199 #ifdef HAVE_cmpmemsi
4201 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4204 #ifdef HAVE_cmpstrnsi
4206 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4211 /* If we don't have POINTER_TYPE, call the function. */
4212 if (arg1_align == 0 || arg2_align == 0)
4215 /* Make a place to write the result of the instruction. */
4218 && REG_P (result) && GET_MODE (result) == insn_mode
4219 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4220 result = gen_reg_rtx (insn_mode);
4222 arg1_rtx = get_memory_rtx (arg1, len);
4223 arg2_rtx = get_memory_rtx (arg2, len);
4224 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4226 /* Set MEM_SIZE as appropriate. */
4227 if (CONST_INT_P (arg3_rtx))
4229 set_mem_size (arg1_rtx, arg3_rtx);
4230 set_mem_size (arg2_rtx, arg3_rtx);
4233 #ifdef HAVE_cmpmemsi
4235 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4236 GEN_INT (MIN (arg1_align, arg2_align)));
4239 #ifdef HAVE_cmpstrnsi
4241 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4242 GEN_INT (MIN (arg1_align, arg2_align)));
/* No usable pattern (or pattern generation failed): emit a libcall to
   memcmp; LCT_PURE because memcmp only reads its arguments.  */
4250 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4251 TYPE_MODE (integer_type_node), 3,
4252 XEXP (arg1_rtx, 0), Pmode,
4253 XEXP (arg2_rtx, 0), Pmode,
4254 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4255 TYPE_UNSIGNED (sizetype)),
4256 TYPE_MODE (sizetype));
4258 /* Return the value in the proper mode for this function. */
4259 mode = TYPE_MODE (TREE_TYPE (exp));
4260 if (GET_MODE (result) == mode)
4262 else if (target != 0)
4264 convert_move (target, result, 0);
4268 return convert_to_mode (mode, result, 0)
4275 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4276 if we failed the caller should emit a normal call, otherwise try to get
4277 the result in TARGET, if convenient. */
4280 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4282 location_t loc = EXPR_LOCATION (exp);
/* Both arguments must be pointers; otherwise punt to a library call.  */
4284 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* First try to fold the whole comparison at compile time (e.g. both
   operands are string literals).  */
4288 tree result = fold_builtin_strcmp (loc,
4289 CALL_EXPR_ARG (exp, 0),
4290 CALL_EXPR_ARG (exp, 1));
4292 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* Otherwise try the target's cmpstrsi / cmpstrnsi patterns, if any.  */
4295 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4296 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4297 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4299 rtx arg1_rtx, arg2_rtx;
4300 rtx result, insn = NULL_RTX;
4302 tree arg1 = CALL_EXPR_ARG (exp, 0);
4303 tree arg2 = CALL_EXPR_ARG (exp, 1);
/* Known pointer alignments in bytes; a zero means we could not prove
   any alignment, in which case we fall back to a library call.  */
4306 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4308 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4310 /* If we don't have POINTER_TYPE, call the function. */
4311 if (arg1_align == 0 || arg2_align == 0)
4314 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4315 arg1 = builtin_save_expr (arg1);
4316 arg2 = builtin_save_expr (arg2);
4318 arg1_rtx = get_memory_rtx (arg1, NULL);
4319 arg2_rtx = get_memory_rtx (arg2, NULL);
4321 #ifdef HAVE_cmpstrsi
4322 /* Try to call cmpstrsi. */
4325 enum machine_mode insn_mode
4326 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4328 /* Make a place to write the result of the instruction. */
/* Reuse TARGET only when it is a pseudo register of the insn's mode;
   otherwise allocate a fresh pseudo.  */
4331 && REG_P (result) && GET_MODE (result) == insn_mode
4332 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4333 result = gen_reg_rtx (insn_mode);
4335 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4336 GEN_INT (MIN (arg1_align, arg2_align)));
4339 #ifdef HAVE_cmpstrnsi
4340 /* Try to determine at least one length and call cmpstrnsi. */
4341 if (!insn && HAVE_cmpstrnsi)
4346 enum machine_mode insn_mode
4347 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4348 tree len1 = c_strlen (arg1, 1);
4349 tree len2 = c_strlen (arg2, 1);
/* Add one for the terminating NUL so the compare covers it.  */
4352 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4354 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4356 /* If we don't have a constant length for the first, use the length
4357 of the second, if we know it. We don't require a constant for
4358 this case; some cost analysis could be done if both are available
4359 but neither is constant. For now, assume they're equally cheap,
4360 unless one has side effects. If both strings have constant lengths,
4367 else if (TREE_SIDE_EFFECTS (len1))
4369 else if (TREE_SIDE_EFFECTS (len2))
4371 else if (TREE_CODE (len1) != INTEGER_CST)
4373 else if (TREE_CODE (len2) != INTEGER_CST)
4375 else if (tree_int_cst_lt (len1, len2))
4380 /* If both arguments have side effects, we cannot optimize. */
4381 if (!len || TREE_SIDE_EFFECTS (len))
4384 arg3_rtx = expand_normal (len);
4386 /* Make a place to write the result of the instruction. */
4389 && REG_P (result) && GET_MODE (result) == insn_mode
4390 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4391 result = gen_reg_rtx (insn_mode);
4393 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4394 GEN_INT (MIN (arg1_align, arg2_align)));
4402 /* Return the value in the proper mode for this function. */
4403 mode = TYPE_MODE (TREE_TYPE (exp));
4404 if (GET_MODE (result) == mode)
4407 return convert_to_mode (mode, result, 0);
4408 convert_move (target, result, 0);
4412 /* Expand the library call ourselves using a stabilized argument
4413 list to avoid re-evaluating the function's arguments twice. */
4414 #ifdef HAVE_cmpstrnsi
4417 fndecl = get_callee_fndecl (exp);
4418 fn = build_call_expr (fndecl, 2, arg1, arg2);
4419 if (TREE_CODE (fn) == CALL_EXPR)
/* Preserve the original call's tail-call flag on the fallback call.  */
4420 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4421 return expand_call (fn, target, target == const0_rtx);
4427 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4428 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4429 the result in TARGET, if convenient. */
4432 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4434 location_t loc = EXPR_LOCATION (exp);
/* Arguments must be (pointer, pointer, integer); otherwise punt.  */
4436 if (!validate_arglist (exp,
4437 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* First try a full compile-time fold.  */
4441 tree result = fold_builtin_strncmp (loc,
4442 CALL_EXPR_ARG (exp, 0),
4443 CALL_EXPR_ARG (exp, 1),
4444 CALL_EXPR_ARG (exp, 2));
4446 return expand_expr (result, target, mode, EXPAND_NORMAL);
4449 /* If c_strlen can determine an expression for one of the string
4450 lengths, and it doesn't have side effects, then emit cmpstrnsi
4451 using length MIN(strlen(string)+1, arg3). */
4452 #ifdef HAVE_cmpstrnsi
4455 tree len, len1, len2;
4456 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4459 tree arg1 = CALL_EXPR_ARG (exp, 0);
4460 tree arg2 = CALL_EXPR_ARG (exp, 1);
4461 tree arg3 = CALL_EXPR_ARG (exp, 2);
/* Known pointer alignments in bytes; zero means unknown.  */
4464 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4466 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4467 enum machine_mode insn_mode
4468 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4470 len1 = c_strlen (arg1, 1);
4471 len2 = c_strlen (arg2, 1);
/* Account for the terminating NUL byte in each known length.  */
4474 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4476 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4478 /* If we don't have a constant length for the first, use the length
4479 of the second, if we know it. We don't require a constant for
4480 this case; some cost analysis could be done if both are available
4481 but neither is constant. For now, assume they're equally cheap,
4482 unless one has side effects. If both strings have constant lengths,
4489 else if (TREE_SIDE_EFFECTS (len1))
4491 else if (TREE_SIDE_EFFECTS (len2))
4493 else if (TREE_CODE (len1) != INTEGER_CST)
4495 else if (TREE_CODE (len2) != INTEGER_CST)
4497 else if (tree_int_cst_lt (len1, len2))
4502 /* If both arguments have side effects, we cannot optimize. */
4503 if (!len || TREE_SIDE_EFFECTS (len))
4506 /* The actual new length parameter is MIN(len,arg3). */
4507 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4508 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4510 /* If we don't have POINTER_TYPE, call the function. */
4511 if (arg1_align == 0 || arg2_align == 0)
4514 /* Make a place to write the result of the instruction. */
/* Reuse TARGET only if it is a pseudo reg of the insn's result mode.  */
4517 && REG_P (result) && GET_MODE (result) == insn_mode
4518 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4519 result = gen_reg_rtx (insn_mode);
4521 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4522 arg1 = builtin_save_expr (arg1);
4523 arg2 = builtin_save_expr (arg2);
4524 len = builtin_save_expr (len);
4526 arg1_rtx = get_memory_rtx (arg1, len);
4527 arg2_rtx = get_memory_rtx (arg2, len);
4528 arg3_rtx = expand_normal (len);
4529 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4530 GEN_INT (MIN (arg1_align, arg2_align)));
4535 /* Return the value in the proper mode for this function. */
4536 mode = TYPE_MODE (TREE_TYPE (exp));
4537 if (GET_MODE (result) == mode)
4540 return convert_to_mode (mode, result, 0);
4541 convert_move (target, result, 0);
4545 /* Expand the library call ourselves using a stabilized argument
4546 list to avoid re-evaluating the function's arguments twice. */
4547 fndecl = get_callee_fndecl (exp);
4548 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4549 if (TREE_CODE (fn) == CALL_EXPR)
4550 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4551 return expand_call (fn, target, target == const0_rtx);
4557 /* Expand expression EXP, which is a call to the strcat builtin.
4558 Return NULL_RTX if we failed the caller should emit a normal call,
4559 otherwise try to get the result in TARGET, if convenient. */
4562 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4564 location_t loc = EXPR_LOCATION (exp);
4566 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4570 tree dst = CALL_EXPR_ARG (exp, 0);
4571 tree src = CALL_EXPR_ARG (exp, 1);
/* Non-NULL only when SRC is a string literal we can read.  */
4572 const char *p = c_getstr (src);
4574 /* If the string length is zero, return the dst parameter. */
4575 if (p && *p == '\0')
4576 return expand_expr (dst, target, mode, EXPAND_NORMAL);
/* This transformation costs an extra strlen call, so only do it when
   optimizing for speed, not size.  */
4578 if (optimize_insn_for_speed_p ())
4580 /* See if we can store by pieces into (dst + strlen(dst)). */
4581 tree newsrc, newdst,
4582 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4585 /* Stabilize the argument list. */
4586 newsrc = builtin_save_expr (src);
4587 dst = builtin_save_expr (dst);
4591 /* Create strlen (dst). */
4592 newdst = build_call_expr (strlen_fn, 1, dst);
4593 /* Create (dst p+ strlen (dst)). */
4595 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
4596 TREE_TYPE (dst), dst, newdst);
4597 newdst = builtin_save_expr (newdst);
/* Reduce strcat to a strcpy into the computed end-of-dst address;
   if that expansion fails, discard the partial insn sequence.  */
4599 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4601 end_sequence (); /* Stop sequence. */
4605 /* Output the entire sequence. */
4606 insns = get_insns ();
/* strcat returns its first argument.  */
4610 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4617 /* Expand expression EXP, which is a call to the strncat builtin.
4618 Return NULL_RTX if we failed the caller should emit a normal call,
4619 otherwise try to get the result in TARGET, if convenient. */
4622 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4624 if (validate_arglist (exp,
4625 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* Delegate all the work to the tree-level folder; expand its result
   only when the fold succeeded.  */
4627 tree result = fold_builtin_strncat (EXPR_LOCATION (exp),
4628 CALL_EXPR_ARG (exp, 0),
4629 CALL_EXPR_ARG (exp, 1),
4630 CALL_EXPR_ARG (exp, 2));
4632 return expand_expr (result, target, mode, EXPAND_NORMAL);
4637 /* Expand expression EXP, which is a call to the strspn builtin.
4638 Return NULL_RTX if we failed the caller should emit a normal call,
4639 otherwise try to get the result in TARGET, if convenient. */
4642 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4644 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Only the compile-time fold is attempted; there is no insn pattern
   for strspn.  */
4646 tree result = fold_builtin_strspn (EXPR_LOCATION (exp),
4647 CALL_EXPR_ARG (exp, 0),
4648 CALL_EXPR_ARG (exp, 1));
4650 return expand_expr (result, target, mode, EXPAND_NORMAL);
4655 /* Expand expression EXP, which is a call to the strcspn builtin.
4656 Return NULL_RTX if we failed the caller should emit a normal call,
4657 otherwise try to get the result in TARGET, if convenient. */
4660 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4662 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Mirror of expand_builtin_strspn: fold or fall back to a call.  */
4664 tree result = fold_builtin_strcspn (EXPR_LOCATION (exp),
4665 CALL_EXPR_ARG (exp, 0),
4666 CALL_EXPR_ARG (exp, 1));
4668 return expand_expr (result, target, mode, EXPAND_NORMAL);
4673 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4674 if that's convenient. */
4677 expand_builtin_saveregs (void)
4681 /* Don't do __builtin_saveregs more than once in a function.
4682 Save the result of the first call and reuse it. */
4683 if (saveregs_value != 0)
4684 return saveregs_value;
4686 /* When this function is called, it means that registers must be
4687 saved on entry to this function. So we migrate the call to the
4688 first insn of this function. */
4692 /* Do whatever the machine needs done in this case. */
/* The real work is target-specific; delegate to the backend hook.  */
4693 val = targetm.calls.expand_builtin_saveregs ();
/* Cache the result for subsequent calls within this function.  */
4698 saveregs_value = val;
4700 /* Put the insns after the NOTE that starts the function. If this
4701 is inside a start_sequence, make the outer-level insn chain current, so
4702 the code is placed at the start of the function. */
4703 push_topmost_sequence ();
4704 emit_insn_after (seq, entry_of_function ());
4705 pop_topmost_sequence ();
4710 /* __builtin_args_info (N) returns word N of the arg space info
4711 for the current function. The number and meanings of words
4712 is controlled by the definition of CUMULATIVE_ARGS. */
4715 expand_builtin_args_info (tree exp)
/* View the CUMULATIVE_ARGS record as an array of ints.  */
4717 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4718 int *word_ptr = (int *) &crtl->args.info;
/* The int-array view is only valid if the struct size is a whole
   number of ints.  */
4720 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4722 if (call_expr_nargs (exp) != 0)
/* The word index must be a compile-time constant.  */
4724 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4725 error ("argument of %<__builtin_args_info%> must be constant");
4728 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4730 if (wordnum < 0 || wordnum >= nwords)
4731 error ("argument of %<__builtin_args_info%> out of range");
4733 return GEN_INT (word_ptr[wordnum]);
4737 error ("missing argument in %<__builtin_args_info%>");
4742 /* Expand a call to __builtin_next_arg. */
4745 expand_builtin_next_arg (void)
4747 /* Checking arguments is already done in fold_builtin_next_arg
4748 that must be called before this function. */
/* The next anonymous argument lives at internal_arg_pointer plus the
   offset of the first unnamed parameter.  */
4749 return expand_binop (ptr_mode, add_optab,
4750 crtl->args.internal_arg_pointer,
4751 crtl->args.arg_offset_rtx,
4752 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4755 /* Make it easier for the backends by protecting the valist argument
4756 from multiple evaluations. */
4759 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4761 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4763 gcc_assert (vatype != NULL_TREE);
/* Array-type va_list: hand the backend a pointer to the array.  */
4765 if (TREE_CODE (vatype) == ARRAY_TYPE)
4767 if (TREE_SIDE_EFFECTS (valist))
4768 valist = save_expr (valist);
4770 /* For this case, the backends will be expecting a pointer to
4771 vatype, but it's possible we've actually been given an array
4772 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4774 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4776 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4777 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
/* Non-array va_list and an lvalue is required: take the address,
   save it once, then dereference so later uses share one evaluation.  */
4786 if (! TREE_SIDE_EFFECTS (valist))
4789 pt = build_pointer_type (vatype);
4790 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
/* Force SAVE_EXPR wrapping below even for a "simple" address.  */
4791 TREE_SIDE_EFFECTS (valist) = 1;
4794 if (TREE_SIDE_EFFECTS (valist))
4795 valist = save_expr (valist);
4796 valist = build_fold_indirect_ref_loc (loc, valist);
4802 /* The "standard" definition of va_list is void*. */
4805 std_build_builtin_va_list (void)
4807 return ptr_type_node;
4810 /* The "standard" abi va_list is va_list_type_node. */
4813 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4815 return va_list_type_node;
4818 /* The "standard" type of va_list is va_list_type_node. */
4821 std_canonical_va_list_type (tree type)
/* Strip one level of indirection so both "va_list" and "va_list *"
   style arguments are handled.  */
4825 if (INDIRECT_REF_P (type))
4826 type = TREE_TYPE (type);
4827 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4828 type = TREE_TYPE (type);
4829 wtype = va_list_type_node;
4831 /* Treat structure va_list types. */
4832 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4833 htype = TREE_TYPE (htype);
4834 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4836 /* If va_list is an array type, the argument may have decayed
4837 to a pointer type, e.g. by being passed to another function.
4838 In that case, unwrap both types so that we can compare the
4839 underlying records. */
4840 if (TREE_CODE (htype) == ARRAY_TYPE
4841 || POINTER_TYPE_P (htype))
4843 wtype = TREE_TYPE (wtype);
4844 htype = TREE_TYPE (htype);
/* Compare main variants so qualifiers don't defeat the match;
   a non-match yields no canonical type (caller checks for NULL).  */
4847 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4848 return va_list_type_node;
4853 /* The "standard" implementation of va_start: just assign `nextarg' to
4857 std_expand_builtin_va_start (tree valist, rtx nextarg)
/* Expand VALIST as a writable lvalue and store NEXTARG into it.  */
4859 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4860 convert_move (va_r, nextarg, 0);
4863 /* Expand EXP, a call to __builtin_va_start. */
4866 expand_builtin_va_start (tree exp)
4870 location_t loc = EXPR_LOCATION (exp);
/* va_start takes the va_list plus the last named parameter.  */
4872 if (call_expr_nargs (exp) < 2)
4874 error_at (loc, "too few arguments to function %<va_start%>");
/* fold_builtin_next_arg diagnoses a bad second argument.  */
4878 if (fold_builtin_next_arg (exp, true))
4881 nextarg = expand_builtin_next_arg ();
4882 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
/* Prefer the target hook when the backend provides one.  */
4884 if (targetm.expand_builtin_va_start)
4885 targetm.expand_builtin_va_start (valist, nextarg);
4887 std_expand_builtin_va_start (valist, nextarg);
4892 /* The "standard" implementation of va_arg: read the value from the
4893 current (padded) address and increment by the (padded) size. */
4896 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4899 tree addr, t, type_size, rounded_size, valist_tmp;
4900 unsigned HOST_WIDE_INT align, boundary;
4903 #ifdef ARGS_GROW_DOWNWARD
4904 /* All of the alignment and movement below is for args-grow-up machines.
4905 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4906 implement their own specialized gimplify_va_arg_expr routines. */
/* Arguments passed by reference are fetched as a pointer and then
   dereferenced at the end.  */
4910 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4912 type = build_pointer_type (type);
4914 align = PARM_BOUNDARY / BITS_PER_UNIT;
4915 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4917 /* When we align parameter on stack for caller, if the parameter
4918 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4919 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4920 here with caller. */
4921 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4922 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
/* From here on, BOUNDARY is in bytes (like ALIGN).  */
4924 boundary /= BITS_PER_UNIT;
4926 /* Hoist the valist value into a temporary for the moment. */
4927 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4929 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4930 requires greater alignment, we must perform dynamic alignment. */
4931 if (boundary > align
4932 && !integer_zerop (TYPE_SIZE (type)))
/* ap = (ap + boundary - 1) & -boundary: round AP up to BOUNDARY.  */
4934 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4935 fold_build2 (POINTER_PLUS_EXPR,
4937 valist_tmp, size_int (boundary - 1)));
4938 gimplify_and_add (t, pre_p);
4940 t = fold_convert (sizetype, valist_tmp);
4941 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4942 fold_convert (TREE_TYPE (valist),
4943 fold_build2 (BIT_AND_EXPR, sizetype, t,
4944 size_int (-boundary))));
4945 gimplify_and_add (t, pre_p);
4950 /* If the actual alignment is less than the alignment of the type,
4951 adjust the type accordingly so that we don't assume strict alignment
4952 when dereferencing the pointer. */
4953 boundary *= BITS_PER_UNIT;
4954 if (boundary < TYPE_ALIGN (type))
4956 type = build_variant_type_copy (type);
4957 TYPE_ALIGN (type) = boundary;
4960 /* Compute the rounded size of the type. */
4961 type_size = size_in_bytes (type);
4962 rounded_size = round_up (type_size, align);
4964 /* Reduce rounded_size so it's sharable with the postqueue. */
4965 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4969 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4971 /* Small args are padded downward. */
/* addr += (rounded_size > align) ? 0 : rounded_size - type_size,
   i.e. skip the leading padding of a down-padded small argument.  */
4972 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4973 rounded_size, size_int (align));
4974 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4975 size_binop (MINUS_EXPR, rounded_size, type_size));
4976 addr = fold_build2 (POINTER_PLUS_EXPR,
4977 TREE_TYPE (addr), addr, t);
4980 /* Compute new value for AP. */
4981 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4982 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4983 gimplify_and_add (t, pre_p);
4985 addr = fold_convert (build_pointer_type (type), addr);
/* For by-reference arguments, ADDR holds a pointer-to-pointer; one
   extra dereference yields the argument's address.  */
4988 addr = build_va_arg_indirect_ref (addr);
4990 return build_va_arg_indirect_ref (addr);
4993 /* Build an indirect-ref expression over the given TREE, which represents a
4994 piece of a va_arg() expansion. */
4996 build_va_arg_indirect_ref (tree addr)
4998 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
/* Mudflap must not instrument compiler-generated va_arg accesses.  */
5000 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
5006 /* Return a dummy expression of type TYPE in order to keep going after an
/* Builds *(TYPE *)0 — never meant to be executed, only to give the
   expression tree the right type after a diagnosed error.  */
5010 dummy_object (tree type)
5012 tree t = build_int_cst (build_pointer_type (type), 0);
5013 return build1 (INDIRECT_REF, type, t);
5016 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
5017 builtin function, but a very special sort of operator. */
5019 enum gimplify_status
5020 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5022 tree promoted_type, have_va_type;
5023 tree valist = TREE_OPERAND (*expr_p, 0);
5024 tree type = TREE_TYPE (*expr_p);
5026 location_t loc = EXPR_LOCATION (*expr_p);
5028 /* Verify that valist is of the proper type. */
5029 have_va_type = TREE_TYPE (valist);
5030 if (have_va_type == error_mark_node)
5032 have_va_type = targetm.canonical_va_list_type (have_va_type);
/* NULL here means the operand is not any recognized va_list type.  */
5034 if (have_va_type == NULL_TREE)
5036 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
5040 /* Generate a diagnostic for requesting data of a type that cannot
5041 be passed through `...' due to type promotion at the call site. */
5042 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
/* Emit the help note only once per compilation.  */
5045 static bool gave_help;
5048 /* Unfortunately, this is merely undefined, rather than a constraint
5049 violation, so we cannot make this an error. If this call is never
5050 executed, the program is still strictly conforming. */
5051 warned = warning_at (loc, 0,
5052 "%qT is promoted to %qT when passed through %<...%>",
5053 type, promoted_type);
5054 if (!gave_help && warned)
5057 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
5058 promoted_type, type);
5061 /* We can, however, treat "undefined" any way we please.
5062 Call abort to encourage the user to fix the program. */
5064 inform (loc, "if this code is reached, the program will abort");
5065 /* Before the abort, allow the evaluation of the va_list
5066 expression to exit or longjmp. */
5067 gimplify_and_add (valist, pre_p);
5068 t = build_call_expr_loc (loc,
5069 implicit_built_in_decls[BUILT_IN_TRAP], 0);
5070 gimplify_and_add (t, pre_p);
5072 /* This is dead code, but go ahead and finish so that the
5073 mode of the result comes out right. */
5074 *expr_p = dummy_object (type);
5079 /* Make it easier for the backends by protecting the valist argument
5080 from multiple evaluations. */
5081 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5083 /* For this case, the backends will be expecting a pointer to
5084 TREE_TYPE (abi), but it's possible we've
5085 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5087 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5089 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5090 valist = fold_convert_loc (loc, p1,
5091 build_fold_addr_expr_loc (loc, valist));
/* Array-style va_lists gimplify to an rvalue pointer; others must
   stay an lvalue so the target hook can update them in place.  */
5094 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5097 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5099 if (!targetm.gimplify_va_arg_expr)
5100 /* FIXME: Once most targets are converted we should merely
5101 assert this is non-null. */
5104 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5109 /* Expand EXP, a call to __builtin_va_end. */
5112 expand_builtin_va_end (tree exp)
5114 tree valist = CALL_EXPR_ARG (exp, 0);
5116 /* Evaluate for side effects, if needed. I hate macros that don't
/* va_end itself generates no code; only side effects of the operand
   expression (if any) are preserved.  */
5118 if (TREE_SIDE_EFFECTS (valist))
5119 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5124 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5125 builtin rather than just as an assignment in stdarg.h because of the
5126 nastiness of array-type va_list types. */
5129 expand_builtin_va_copy (tree exp)
5132 location_t loc = EXPR_LOCATION (exp);
5134 dst = CALL_EXPR_ARG (exp, 0);
5135 src = CALL_EXPR_ARG (exp, 1);
/* DST needs to be an lvalue; SRC only needs to be readable.  */
5137 dst = stabilize_va_list_loc (loc, dst, 1);
5138 src = stabilize_va_list_loc (loc, src, 0);
5140 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
/* Scalar (non-array) va_list: a plain assignment suffices.  */
5142 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5144 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5145 TREE_SIDE_EFFECTS (t) = 1;
5146 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Array-type va_list: copy the whole object with a block move.  */
5150 rtx dstb, srcb, size;
5152 /* Evaluate to pointers. */
5153 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5154 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5155 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5156 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5158 dstb = convert_memory_address (Pmode, dstb);
5159 srcb = convert_memory_address (Pmode, srcb);
5161 /* "Dereference" to BLKmode memories. */
5162 dstb = gen_rtx_MEM (BLKmode, dstb);
5163 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5164 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5165 srcb = gen_rtx_MEM (BLKmode, srcb);
5166 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5167 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5170 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5176 /* Expand a call to one of the builtin functions __builtin_frame_address or
5177 __builtin_return_address. */
5180 expand_builtin_frame_address (tree fndecl, tree exp)
5182 /* The argument must be a nonnegative integer constant.
5183 It counts the number of frames to scan up the stack.
5184 The value is the return address saved in that frame. */
5185 if (call_expr_nargs (exp) == 0)
5186 /* Warning about missing arg was already issued. */
5188 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
/* Diagnostic wording depends on which of the two builtins this is.  */
5190 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5191 error ("invalid argument to %<__builtin_frame_address%>");
5193 error ("invalid argument to %<__builtin_return_address%>");
5199 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5200 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5202 /* Some ports cannot access arbitrary stack frames. */
5205 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5206 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5208 warning (0, "unsupported argument to %<__builtin_return_address%>");
5212 /* For __builtin_frame_address, return what we've got. */
5213 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
/* Copy a non-constant result into a register before returning it.  */
5217 && ! CONSTANT_P (tem))
5218 tem = copy_to_mode_reg (Pmode, tem);
5223 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5224 we failed and the caller should emit a normal call, otherwise try to get
5225 the result in TARGET, if convenient. */
5228 expand_builtin_alloca (tree exp, rtx target)
5233 /* Emit normal call if marked not-inlineable. */
5234 if (CALL_CANNOT_INLINE_P (exp))
5237 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5240 /* Compute the argument. */
5241 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5243 /* Allocate the desired space. */
5244 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
/* The stack grows in Pmode; narrow/widen the address to ptr_mode for
   the caller.  */
5245 result = convert_memory_address (ptr_mode, result);
5250 /* Expand a call to a bswap builtin with argument ARG0. MODE
5251 is the mode to expand with. */
5254 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5256 enum machine_mode mode;
5260 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5263 arg = CALL_EXPR_ARG (exp, 0);
5264 mode = TYPE_MODE (TREE_TYPE (arg));
5265 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* expand_unop handles bswap via the optab; it must not fail here.  */
5267 target = expand_unop (mode, bswap_optab, op0, target, 1);
5269 gcc_assert (target);
5271 return convert_to_mode (mode, target, 0);
5274 /* Expand a call to a unary builtin in EXP.
5275 Return NULL_RTX if a normal call should be emitted rather than expanding the
5276 function in-line. If convenient, the result should be placed in TARGET.
5277 SUBTARGET may be used as the target for computing one of EXP's operands. */
5280 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5281 rtx subtarget, optab op_optab)
5285 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5288 /* Compute the argument. */
5289 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5290 VOIDmode, EXPAND_NORMAL);
5291 /* Compute op, into TARGET if possible.
5292 Set TARGET to wherever the result comes back. */
/* Expand in the argument's own mode, then widen/narrow below.  */
5293 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5294 op_optab, op0, target, 1);
5295 gcc_assert (target);
5297 return convert_to_mode (target_mode, target, 0);
5300 /* If the string passed to fputs is a constant and is one character
5301 long, we attempt to transform this call into __builtin_fputc(). */
5304 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5306 /* Verify the arguments in the original call. */
5307 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* The folder needs to know whether the return value is ignored
   (target == const0_rtx) to decide if fputc/fwrite are valid.  */
5309 tree result = fold_builtin_fputs (EXPR_LOCATION (exp),
5310 CALL_EXPR_ARG (exp, 0),
5311 CALL_EXPR_ARG (exp, 1),
5312 (target == const0_rtx),
5313 unlocked, NULL_TREE);
5315 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5320 /* Expand a call to __builtin_expect. We just return our argument
5321 as the builtin_expect semantic should've been already executed by
5322 tree branch prediction pass. */
5325 expand_builtin_expect (tree exp, rtx target)
5329 if (call_expr_nargs (exp) < 2)
5331 arg = CALL_EXPR_ARG (exp, 0);
5332 c = CALL_EXPR_ARG (exp, 1);
/* At RTL time the hint is gone; just produce the first argument.  */
5334 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5335 /* When guessing was done, the hints should be already stripped away. */
5336 gcc_assert (!flag_guess_branch_prob
5337 || optimize == 0 || errorcount || sorrycount);
/* Expand __builtin_trap: use the target's trap insn when available,
   otherwise call abort() through the library.  */
5342 expand_builtin_trap (void)
5346 emit_insn (gen_trap ());
5349 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5353 /* Expand a call to __builtin_unreachable. We do nothing except emit
5354 a barrier saying that control flow will not pass here.
5356 It is the responsibility of the program being compiled to ensure
5357 that control flow does never reach __builtin_unreachable. */
5359 expand_builtin_unreachable (void)
5364 /* Expand EXP, a call to fabs, fabsf or fabsl.
5365 Return NULL_RTX if a normal call should be emitted rather than expanding
5366 the function inline. If convenient, the result should be placed
5367 in TARGET. SUBTARGET may be used as the target for computing
5371 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5373 enum machine_mode mode;
5377 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5380 arg = CALL_EXPR_ARG (exp, 0);
/* Stabilize the argument and write it back so a possible fallback
   call re-uses the same evaluation.  */
5381 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5382 mode = TYPE_MODE (TREE_TYPE (arg));
5383 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5384 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5387 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5388 Return NULL is a normal call should be emitted rather than expanding the
5389 function inline. If convenient, the result should be placed in TARGET.
5390 SUBTARGET may be used as the target for computing the operand. */
5393 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5398 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
/* op0: magnitude source; op1: sign source.  */
5401 arg = CALL_EXPR_ARG (exp, 0);
5402 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5404 arg = CALL_EXPR_ARG (exp, 1);
5405 op1 = expand_normal (arg);
5407 return expand_copysign (op0, op1, target);
5410 /* Create a new constant string literal and return a char* pointer to it.
5411 The STRING_CST value is the LEN characters at STR. */
5413 build_string_literal (int len, const char *str)
5415 tree t, elem, index, type;
5417 t = build_string (len, str);
/* Element type is const char; the array type is const char[len].  */
5418 elem = build_type_variant (char_type_node, 1, 0);
5419 index = build_index_type (size_int (len - 1));
5420 type = build_array_type (elem, index);
5421 TREE_TYPE (t) = type;
5422 TREE_CONSTANT (t) = 1;
5423 TREE_READONLY (t) = 1;
5424 TREE_STATIC (t) = 1;
/* Return &literal[0] as a pointer to the element type.  */
5426 type = build_pointer_type (elem);
5427 t = build1 (ADDR_EXPR, type,
5428 build4 (ARRAY_REF, elem,
5429 t, integer_zero_node, NULL_TREE, NULL_TREE));
5433 /* Expand EXP, a call to printf or printf_unlocked.
5434 Return NULL_RTX if a normal call should be emitted rather than transforming
5435 the function inline. If convenient, the result should be placed in
5436 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5439 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5442 /* If we're using an unlocked function, assume the other unlocked
5443 functions exist explicitly. */
/* NOTE(review): these decls may be NULL_TREE when the runtime does not
   provide the replacement function; presumably callers bail out in that
   case — the NULL checks are not visible here, confirm downstream. */
5444 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5445 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5446 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5447 : implicit_built_in_decls[BUILT_IN_PUTS];
5448 const char *fmt_str;
5451 int nargs = call_expr_nargs (exp);
5453 /* If the return value is used, don't do the transformation. */
5454 if (target != const0_rtx)
5457 /* Verify the required arguments in the original call. */
5460 fmt = CALL_EXPR_ARG (exp, 0);
5461 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5464 /* Check whether the format is a literal string constant. */
5465 fmt_str = c_getstr (fmt);
5466 if (fmt_str == NULL)
5469 if (!init_target_chars ())
5472 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5473 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5476 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5479 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5481 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5482 else if (strcmp (fmt_str, target_percent_c) == 0)
5485 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5488 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5492 /* We can't handle anything else with % args or %% ... yet. */
5493 if (strchr (fmt_str, target_percent))
5499 /* If the format specifier was "", printf does nothing. */
5500 if (fmt_str[0] == '\0')
5502 /* If the format specifier has length of 1, call putchar. */
5503 if (fmt_str[1] == '\0')
5505 /* Given printf("c"), (where c is any one character,)
5506 convert "c"[0] to an int and pass that to the replacement
5508 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5510 fn = build_call_expr (fn_putchar, 1, arg);
5514 /* If the format specifier was "string\n", call puts("string"). */
5515 size_t len = strlen (fmt_str);
5516 if ((unsigned char)fmt_str[len - 1] == target_newline)
5518 /* Create a NUL-terminated string that's one char shorter
5519 than the original, stripping off the trailing '\n'. */
5520 char *newstr = XALLOCAVEC (char, len);
5521 memcpy (newstr, fmt_str, len - 1);
5522 newstr[len - 1] = 0;
/* LEN here counts the stripped string plus its NUL terminator. */
5523 arg = build_string_literal (len, newstr);
5525 fn = build_call_expr (fn_puts, 1, arg);
5528 /* We'd like to arrange to call fputs(string,stdout) here,
5529 but we need stdout and don't have a way to get it yet. */
/* Propagate tail-call status to the replacement call, then expand it. */
5536 if (TREE_CODE (fn) == CALL_EXPR)
5537 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5538 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5541 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5542 Return NULL_RTX if a normal call should be emitted rather than transforming
5543 the function inline. If convenient, the result should be placed in
5544 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5547 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5550 /* If we're using an unlocked function, assume the other unlocked
5551 functions exist explicitly. */
/* NOTE(review): either decl may be NULL_TREE if the runtime lacks the
   replacement; the NULL guards are not visible in this view — confirm. */
5552 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5553 : implicit_built_in_decls[BUILT_IN_FPUTC];
5554 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5555 : implicit_built_in_decls[BUILT_IN_FPUTS];
5556 const char *fmt_str;
5559 int nargs = call_expr_nargs (exp);
5561 /* If the return value is used, don't do the transformation. */
5562 if (target != const0_rtx)
5565 /* Verify the required arguments in the original call. */
/* Argument 0 is the FILE* stream, argument 1 the format string. */
5568 fp = CALL_EXPR_ARG (exp, 0);
5569 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5571 fmt = CALL_EXPR_ARG (exp, 1);
5572 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5575 /* Check whether the format is a literal string constant. */
5576 fmt_str = c_getstr (fmt);
5577 if (fmt_str == NULL)
5580 if (!init_target_chars ())
5583 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5584 if (strcmp (fmt_str, target_percent_s) == 0)
5587 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5589 arg = CALL_EXPR_ARG (exp, 2);
5591 fn = build_call_expr (fn_fputs, 2, arg, fp);
5593 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5594 else if (strcmp (fmt_str, target_percent_c) == 0)
5597 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5599 arg = CALL_EXPR_ARG (exp, 2);
5601 fn = build_call_expr (fn_fputc, 2, arg, fp);
5605 /* We can't handle anything else with % args or %% ... yet. */
5606 if (strchr (fmt_str, target_percent))
5612 /* If the format specifier was "", fprintf does nothing. */
5613 if (fmt_str[0] == '\0')
5615 /* Evaluate and ignore FILE* argument for side-effects. */
5616 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5620 /* When "string" doesn't contain %, replace all cases of
5621 fprintf(stream,string) with fputs(string,stream). The fputs
5622 builtin will take care of special cases like length == 1. */
5624 fn = build_call_expr (fn_fputs, 2, fmt, fp);
/* Propagate tail-call status to the replacement call, then expand it. */
5629 if (TREE_CODE (fn) == CALL_EXPR)
5630 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5631 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5634 /* Expand a call EXP to sprintf. Return NULL_RTX if
5635 a normal call should be emitted rather than expanding the function
5636 inline. If convenient, the result should be placed in TARGET with
5640 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5643 const char *fmt_str;
5644 int nargs = call_expr_nargs (exp);
5646 /* Verify the required arguments in the original call. */
/* Argument 0 is the destination buffer; argument 1 is the format. */
5649 dest = CALL_EXPR_ARG (exp, 0);
5650 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
/* BUGFIX: the format string is argument 1, not argument 0 (which is
   DEST).  Fetching argument 0 here would apply c_getstr to the
   destination pointer and transform the wrong operand. */
5652 fmt = CALL_EXPR_ARG (exp, 1);
5653 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5656 /* Check whether the format is a literal string constant. */
5657 fmt_str = c_getstr (fmt);
5658 if (fmt_str == NULL)
5661 if (!init_target_chars ())
5664 /* If the format doesn't contain % args or %%, use strcpy. */
5665 if (strchr (fmt_str, target_percent) == 0)
5667 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
/* Don't optimize sprintf (buf, "abc", ptr++) or when strcpy is
   unavailable. */
5670 if ((nargs > 2) || ! fn)
/* Emit strcpy (dest, fmt) for its side effect; the sprintf result,
   if used, is the constant strlen of the format. */
5672 expand_expr (build_call_expr (fn, 2, dest, fmt),
5673 const0_rtx, VOIDmode, EXPAND_NORMAL);
5674 if (target == const0_rtx)
5676 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5677 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5679 /* If the format is "%s", use strcpy if the result isn't used. */
5680 else if (strcmp (fmt_str, target_percent_s) == 0)
5683 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5689 arg = CALL_EXPR_ARG (exp, 2);
5690 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
/* If the result is used we must know the string length at compile
   time, since sprintf returns the number of characters written. */
5693 if (target != const0_rtx)
5695 len = c_strlen (arg, 1);
5696 if (! len || TREE_CODE (len) != INTEGER_CST)
5702 expand_expr (build_call_expr (fn, 2, dest, arg),
5703 const0_rtx, VOIDmode, EXPAND_NORMAL);
5705 if (target == const0_rtx)
5707 return expand_expr (len, target, mode, EXPAND_NORMAL);
5713 /* Expand a call to either the entry or exit function profiler. */
5716 expand_builtin_profile_func (bool exitp)
5718 rtx this_rtx, which;
/* Address of the current function, taken from its DECL_RTL MEM. */
5720 this_rtx = DECL_RTL (current_function_decl);
5721 gcc_assert (MEM_P (this_rtx));
5722 this_rtx = XEXP (this_rtx, 0);
/* EXITP selects which libfunc is invoked. */
5725 which = profile_function_exit_libfunc;
5727 which = profile_function_entry_libfunc;
/* The second libcall argument is the caller's return address. */
5729 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5730 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5737 /* Expand a call to __builtin___clear_cache. */
5740 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
/* Three configurations:
   1. no clear_cache insn, CLEAR_INSN_CACHE defined: expand to a call
      to __clear_cache in libgcc;
   2. no clear_cache insn, no CLEAR_INSN_CACHE: no-op;
   3. a clear_cache insn exists: emit it directly (never a libcall,
      to avoid recursing through libgcc's fallback).  */
5742 #ifndef HAVE_clear_cache
5743 #ifdef CLEAR_INSN_CACHE
5744 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5745 does something. Just do the default expansion to a call to
5749 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5750 does nothing. There is no need to call it. Do nothing. */
5752 #endif /* CLEAR_INSN_CACHE */
5754 /* We have a "clear_cache" insn, and it will handle everything. */
5756 rtx begin_rtx, end_rtx;
5757 enum insn_code icode;
5759 /* We must not expand to a library call. If we did, any
5760 fallback library function in libgcc that might contain a call to
5761 __builtin___clear_cache() would recurse infinitely. */
5762 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5764 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5768 if (HAVE_clear_cache)
5770 icode = CODE_FOR_clear_cache;
/* Force each operand into a form the insn's predicate accepts. */
5772 begin = CALL_EXPR_ARG (exp, 0);
5773 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5774 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5775 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5776 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5778 end = CALL_EXPR_ARG (exp, 1);
5779 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5780 end_rtx = convert_memory_address (Pmode, end_rtx);
5781 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5782 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5784 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5787 #endif /* HAVE_clear_cache */
5790 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5793 round_trampoline_addr (rtx tramp)
5795 rtx temp, addend, mask;
5797 /* If we don't need too much alignment, we'll have been guaranteed
5798 proper alignment by get_trampoline_type. */
5799 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5802 /* Round address up to desired boundary. */
/* Compute (tramp + align-1) & -align, in bytes. */
5803 temp = gen_reg_rtx (Pmode);
5804 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5805 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5807 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5808 temp, 0, OPTAB_LIB_WIDEN);
5809 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5810 temp, 0, OPTAB_LIB_WIDEN);
/* Expand a call to __builtin_init_trampoline: fill in the trampoline
   at TRAMP so that calling it invokes FUNC with static chain CHAIN. */
5816 expand_builtin_init_trampoline (tree exp)
5818 tree t_tramp, t_func, t_chain;
5819 rtx m_tramp, r_tramp, r_chain, tmp;
5821 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5822 POINTER_TYPE, VOID_TYPE))
5825 t_tramp = CALL_EXPR_ARG (exp, 0);
5826 t_func = CALL_EXPR_ARG (exp, 1);
5827 t_chain = CALL_EXPR_ARG (exp, 2);
5829 r_tramp = expand_normal (t_tramp);
5830 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5831 MEM_NOTRAP_P (m_tramp) = 1;
5833 /* The TRAMP argument should be the address of a field within the
5834 local function's FRAME decl. Let's see if we can use that
5835 to fill in the MEM_ATTRs for this memory. */
5836 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5837 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
/* Align the trampoline address and record alignment/size on the MEM. */
5840 tmp = round_trampoline_addr (r_tramp);
5843 m_tramp = change_address (m_tramp, BLKmode, tmp);
5844 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5845 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5848 /* The FUNC argument should be the address of the nested function.
5849 Extract the actual function decl to pass to the hook. */
5850 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5851 t_func = TREE_OPERAND (t_func, 0);
5852 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5854 r_chain = expand_normal (t_chain);
5856 /* Generate insns to initialize the trampoline. */
5857 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5859 trampolines_created = 1;
/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address to TRAMPOLINE_ALIGNMENT and let the target make any final
   adjustment to the callable address. */
5864 expand_builtin_adjust_trampoline (tree exp)
5868 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5871 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5872 tramp = round_trampoline_addr (tramp);
5873 if (targetm.calls.trampoline_adjust_address)
5874 tramp = targetm.calls.trampoline_adjust_address (tramp);
5879 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5880 function. The function first checks whether the back end provides
5881 an insn to implement signbit for the respective mode. If not, it
5882 checks whether the floating point format of the value is such that
5883 the sign bit can be extracted. If that is not the case, the
5884 function returns NULL_RTX to indicate that a normal call should be
5885 emitted rather than expanding the function in-line. EXP is the
5886 expression that is a call to the builtin function; if convenient,
5887 the result should be placed in TARGET. */
5889 expand_builtin_signbit (tree exp, rtx target)
5891 const struct real_format *fmt;
5892 enum machine_mode fmode, imode, rmode;
5893 HOST_WIDE_INT hi, lo;
5896 enum insn_code icode;
5898 location_t loc = EXPR_LOCATION (exp);
5900 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5903 arg = CALL_EXPR_ARG (exp, 0);
5904 fmode = TYPE_MODE (TREE_TYPE (arg));
5905 rmode = TYPE_MODE (TREE_TYPE (exp));
5906 fmt = REAL_MODE_FORMAT (fmode);
5908 arg = builtin_save_expr (arg);
5910 /* Expand the argument yielding a RTX expression. */
5911 temp = expand_normal (arg);
5913 /* Check if the back end provides an insn that handles signbit for the
5915 icode = signbit_optab->handlers [(int) fmode].insn_code;
5916 if (icode != CODE_FOR_nothing)
5918 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5919 emit_unop_insn (icode, target, temp, UNKNOWN);
5923 /* For floating point formats without a sign bit, implement signbit
/* signbit_ro gives the sign bit position within the format, or a
   negative value when the format has no sign bit. */
5925 bitpos = fmt->signbit_ro;
5928 /* But we can't do this if the format supports signed zero. */
5929 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
/* No sign bit: signbit(x) reduces to x < 0. */
5932 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5933 build_real (TREE_TYPE (arg), dconst0));
5934 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* Narrow values: view the whole FP value as one integer. */
5937 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5939 imode = int_mode_for_mode (fmode);
5940 if (imode == BLKmode)
5942 temp = gen_lowpart (imode, temp);
/* Wide values: pick out the single word holding the sign bit. */
5947 /* Handle targets with different FP word orders. */
5948 if (FLOAT_WORDS_BIG_ENDIAN)
5949 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5951 word = bitpos / BITS_PER_WORD;
5952 temp = operand_subword_force (temp, word, fmode);
5953 bitpos = bitpos % BITS_PER_WORD;
5956 /* Force the intermediate word_mode (or narrower) result into a
5957 register. This avoids attempting to create paradoxical SUBREGs
5958 of floating point modes below. */
5959 temp = force_reg (imode, temp);
5961 /* If the bitpos is within the "result mode" lowpart, the operation
5962 can be implemented with a single bitwise AND. Otherwise, we need
5963 a right shift and an AND. */
5965 if (bitpos < GET_MODE_BITSIZE (rmode))
/* Build the double-word constant 1 << bitpos in (lo, hi). */
5967 if (bitpos < HOST_BITS_PER_WIDE_INT)
5970 lo = (HOST_WIDE_INT) 1 << bitpos;
5974 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5978 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5979 temp = gen_lowpart (rmode, temp);
5980 temp = expand_binop (rmode, and_optab, temp,
5981 immed_double_const (lo, hi, rmode),
5982 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5986 /* Perform a logical right shift to place the signbit in the least
5987 significant bit, then truncate the result to the desired mode
5988 and mask just this bit. */
5989 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5990 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5991 temp = gen_lowpart (rmode, temp);
5992 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5993 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5999 /* Expand fork or exec calls. TARGET is the desired target of the
6000 call. EXP is the call. FN is the
6001 identificator of the actual function. IGNORE is nonzero if the
6002 value is to be ignored. */
6005 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6010 /* If we are not profiling, just call the function. */
6011 if (!profile_arc_flag)
6014 /* Otherwise call the wrapper. This should be equivalent for the rest of
6015 compiler, so the code does not diverge, and the wrapper may run the
6016 code necessary for keeping the profiling sane. */
/* Map each builtin to its __gcov_* wrapper name. */
6018 switch (DECL_FUNCTION_CODE (fn))
6021 id = get_identifier ("__gcov_fork");
6024 case BUILT_IN_EXECL:
6025 id = get_identifier ("__gcov_execl");
6028 case BUILT_IN_EXECV:
6029 id = get_identifier ("__gcov_execv");
6032 case BUILT_IN_EXECLP:
6033 id = get_identifier ("__gcov_execlp");
6036 case BUILT_IN_EXECLE:
6037 id = get_identifier ("__gcov_execle");
6040 case BUILT_IN_EXECVP:
6041 id = get_identifier ("__gcov_execvp");
6044 case BUILT_IN_EXECVE:
6045 id = get_identifier ("__gcov_execve");
/* Synthesize an external decl with the same type as FN and redirect
   the call to it. */
6052 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6053 FUNCTION_DECL, id, TREE_TYPE (fn));
6054 DECL_EXTERNAL (decl) = 1;
6055 TREE_PUBLIC (decl) = 1;
6056 DECL_ARTIFICIAL (decl) = 1;
6057 TREE_NOTHROW (decl) = 1;
6058 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6059 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6060 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6061 return expand_call (call, target, ignore);
6066 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6067 the pointer in these functions is void*, the tree optimizers may remove
6068 casts. The mode computed in expand_builtin isn't reliable either, due
6069 to __sync_bool_compare_and_swap.
6071 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6072 group of builtins. This gives us log2 of the mode size. */
6074 static inline enum machine_mode
6075 get_builtin_sync_mode (int fcode_diff)
6077 /* The size is not negotiable, so ask not to get BLKmode in return
6078 if the target indicates that a smaller size would be better. */
6079 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
6082 /* Expand the memory expression LOC and return the appropriate memory operand
6083 for the builtin_sync operations. */
6086 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6090 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6092 /* Note that we explicitly do not want any alias information for this
6093 memory, so that we kill all other live memories. Otherwise we don't
6094 satisfy the full barrier semantics of the intrinsic. */
6095 mem = validize_mem (gen_rtx_MEM (mode, addr));
/* The barrier alias set and volatility keep the access from being
   moved or combined with surrounding memory operations. */
6097 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6098 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6099 MEM_VOLATILE_P (mem) = 1;
6104 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6105 EXP is the CALL_EXPR. CODE is the rtx code
6106 that corresponds to the arithmetic or logical operation from the name;
6107 an exception here is that NOT actually means NAND. TARGET is an optional
6108 place for us to store the results; AFTER is true if this is the
6109 fetch_and_xxx form. IGNORE is true if we don't actually care about
6110 the result of the operation at all. */
6113 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6114 enum rtx_code code, bool after,
6115 rtx target, bool ignore)
6118 enum machine_mode old_mode;
6119 location_t loc = EXPR_LOCATION (exp);
/* Warn (once per flavor) about __sync_*_nand semantics changing in
   GCC 4.4; -Wsync-nand controls this. */
6121 if (code == NOT && warn_sync_nand)
6123 tree fndecl = get_callee_fndecl (exp);
6124 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6126 static bool warned_f_a_n, warned_n_a_f;
6130 case BUILT_IN_FETCH_AND_NAND_1:
6131 case BUILT_IN_FETCH_AND_NAND_2:
6132 case BUILT_IN_FETCH_AND_NAND_4:
6133 case BUILT_IN_FETCH_AND_NAND_8:
6134 case BUILT_IN_FETCH_AND_NAND_16:
6139 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6140 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6141 warned_f_a_n = true;
6144 case BUILT_IN_NAND_AND_FETCH_1:
6145 case BUILT_IN_NAND_AND_FETCH_2:
6146 case BUILT_IN_NAND_AND_FETCH_4:
6147 case BUILT_IN_NAND_AND_FETCH_8:
6148 case BUILT_IN_NAND_AND_FETCH_16:
6153 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6154 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6155 warned_n_a_f = true;
6163 /* Expand the operands. */
6164 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6166 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6167 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6168 of CONST_INTs, where we know the old_mode only from the call argument. */
6169 old_mode = GET_MODE (val);
6170 if (old_mode == VOIDmode)
6171 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6172 val = convert_modes (mode, old_mode, val, 1);
/* When the result is unused, emit the cheaper result-less form. */
6175 return expand_sync_operation (mem, val, code);
6177 return expand_sync_fetch_operation (mem, val, code, after, target);
6180 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6181 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6182 true if this is the boolean form. TARGET is a place for us to store the
6183 results; this is NOT optional if IS_BOOL is true. */
6186 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6187 bool is_bool, rtx target)
6189 rtx old_val, new_val, mem;
6190 enum machine_mode old_mode;
6192 /* Expand the operands. */
6193 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
/* Argument 1 is the expected (old) value. */
6196 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6197 mode, EXPAND_NORMAL);
6198 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6199 of CONST_INTs, where we know the old_mode only from the call argument. */
6200 old_mode = GET_MODE (old_val);
6201 if (old_mode == VOIDmode)
6202 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6203 old_val = convert_modes (mode, old_mode, old_val, 1);
/* Argument 2 is the replacement (new) value. */
6205 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6206 mode, EXPAND_NORMAL);
6207 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6208 of CONST_INTs, where we know the old_mode only from the call argument. */
6209 old_mode = GET_MODE (new_val);
6210 if (old_mode == VOIDmode)
6211 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6212 new_val = convert_modes (mode, old_mode, new_val, 1);
6215 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6217 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6220 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6221 general form is actually an atomic exchange, and some targets only
6222 support a reduced form with the second argument being a constant 1.
6223 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6227 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6231 enum machine_mode old_mode;
6233 /* Expand the operands. */
6234 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6235 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6236 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6237 of CONST_INTs, where we know the old_mode only from the call argument. */
6238 old_mode = GET_MODE (val);
6239 if (old_mode == VOIDmode)
6240 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6241 val = convert_modes (mode, old_mode, val, 1);
6243 return expand_sync_lock_test_and_set (mem, val, target);
6246 /* Expand the __sync_synchronize intrinsic. */
6249 expand_builtin_synchronize (void)
6252 VEC (tree, gc) *v_clobbers;
/* Preference order: the target's memory_barrier insn, then a
   synchronize libfunc, then a volatile asm that clobbers memory. */
6254 #ifdef HAVE_memory_barrier
6255 if (HAVE_memory_barrier)
6257 emit_insn (gen_memory_barrier ());
6262 if (synchronize_libfunc != NULL_RTX)
6264 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6268 /* If no explicit memory barrier instruction is available, create an
6269 empty asm stmt with a memory clobber. */
6270 v_clobbers = VEC_alloc (tree, gc, 1);
6271 VEC_quick_push (tree, v_clobbers,
6272 tree_cons (NULL, build_string (6, "memory"), NULL));
6273 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
6274 gimple_asm_set_volatile (x, true);
6275 expand_asm_stmt (x);
6278 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6281 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6283 enum insn_code icode;
6285 rtx val = const0_rtx;
6287 /* Expand the operands. */
6288 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6290 /* If there is an explicit operation in the md file, use it. */
6291 icode = sync_lock_release[mode];
6292 if (icode != CODE_FOR_nothing)
6294 if (!insn_data[icode].operand[1].predicate (val, mode))
6295 val = force_reg (mode, val);
6297 insn = GEN_FCN (icode) (mem, val);
6305 /* Otherwise we can implement this operation by emitting a barrier
6306 followed by a store of zero. */
6307 expand_builtin_synchronize ();
6308 emit_move_insn (mem, val);
6311 /* Expand an expression EXP that calls a built-in function,
6312 with result going to TARGET if that's convenient
6313 (and in mode MODE if that's convenient).
6314 SUBTARGET may be used as the target for computing one of EXP's operands.
6315 IGNORE is nonzero if the value is to be ignored. */
6318 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6321 tree fndecl = get_callee_fndecl (exp);
6322 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6323 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6325 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6326 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6328 /* When not optimizing, generate calls to library functions for a certain
6331 && !called_as_built_in (fndecl)
6332 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6333 && fcode != BUILT_IN_ALLOCA
6334 && fcode != BUILT_IN_FREE)
6335 return expand_call (exp, target, ignore);
6337 /* The built-in function expanders test for target == const0_rtx
6338 to determine whether the function's result will be ignored. */
6340 target = const0_rtx;
6342 /* If the result of a pure or const built-in function is ignored, and
6343 none of its arguments are volatile, we can avoid expanding the
6344 built-in call and just evaluate the arguments for side-effects. */
6345 if (target == const0_rtx
6346 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6348 bool volatilep = false;
6350 call_expr_arg_iterator iter;
6352 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6353 if (TREE_THIS_VOLATILE (arg))
6361 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6362 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6369 CASE_FLT_FN (BUILT_IN_FABS):
6370 target = expand_builtin_fabs (exp, target, subtarget);
6375 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6376 target = expand_builtin_copysign (exp, target, subtarget);
6381 /* Just do a normal library call if we were unable to fold
6383 CASE_FLT_FN (BUILT_IN_CABS):
6386 CASE_FLT_FN (BUILT_IN_EXP):
6387 CASE_FLT_FN (BUILT_IN_EXP10):
6388 CASE_FLT_FN (BUILT_IN_POW10):
6389 CASE_FLT_FN (BUILT_IN_EXP2):
6390 CASE_FLT_FN (BUILT_IN_EXPM1):
6391 CASE_FLT_FN (BUILT_IN_LOGB):
6392 CASE_FLT_FN (BUILT_IN_LOG):
6393 CASE_FLT_FN (BUILT_IN_LOG10):
6394 CASE_FLT_FN (BUILT_IN_LOG2):
6395 CASE_FLT_FN (BUILT_IN_LOG1P):
6396 CASE_FLT_FN (BUILT_IN_TAN):
6397 CASE_FLT_FN (BUILT_IN_ASIN):
6398 CASE_FLT_FN (BUILT_IN_ACOS):
6399 CASE_FLT_FN (BUILT_IN_ATAN):
6400 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6401 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6402 because of possible accuracy problems. */
6403 if (! flag_unsafe_math_optimizations)
6405 CASE_FLT_FN (BUILT_IN_SQRT):
6406 CASE_FLT_FN (BUILT_IN_FLOOR):
6407 CASE_FLT_FN (BUILT_IN_CEIL):
6408 CASE_FLT_FN (BUILT_IN_TRUNC):
6409 CASE_FLT_FN (BUILT_IN_ROUND):
6410 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6411 CASE_FLT_FN (BUILT_IN_RINT):
6412 target = expand_builtin_mathfn (exp, target, subtarget);
6417 CASE_FLT_FN (BUILT_IN_ILOGB):
6418 if (! flag_unsafe_math_optimizations)
6420 CASE_FLT_FN (BUILT_IN_ISINF):
6421 CASE_FLT_FN (BUILT_IN_FINITE):
6422 case BUILT_IN_ISFINITE:
6423 case BUILT_IN_ISNORMAL:
6424 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6429 CASE_FLT_FN (BUILT_IN_LCEIL):
6430 CASE_FLT_FN (BUILT_IN_LLCEIL):
6431 CASE_FLT_FN (BUILT_IN_LFLOOR):
6432 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6433 target = expand_builtin_int_roundingfn (exp, target);
6438 CASE_FLT_FN (BUILT_IN_LRINT):
6439 CASE_FLT_FN (BUILT_IN_LLRINT):
6440 CASE_FLT_FN (BUILT_IN_LROUND):
6441 CASE_FLT_FN (BUILT_IN_LLROUND):
6442 target = expand_builtin_int_roundingfn_2 (exp, target);
6447 CASE_FLT_FN (BUILT_IN_POW):
6448 target = expand_builtin_pow (exp, target, subtarget);
6453 CASE_FLT_FN (BUILT_IN_POWI):
6454 target = expand_builtin_powi (exp, target, subtarget);
6459 CASE_FLT_FN (BUILT_IN_ATAN2):
6460 CASE_FLT_FN (BUILT_IN_LDEXP):
6461 CASE_FLT_FN (BUILT_IN_SCALB):
6462 CASE_FLT_FN (BUILT_IN_SCALBN):
6463 CASE_FLT_FN (BUILT_IN_SCALBLN):
6464 if (! flag_unsafe_math_optimizations)
6467 CASE_FLT_FN (BUILT_IN_FMOD):
6468 CASE_FLT_FN (BUILT_IN_REMAINDER):
6469 CASE_FLT_FN (BUILT_IN_DREM):
6470 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6475 CASE_FLT_FN (BUILT_IN_CEXPI):
6476 target = expand_builtin_cexpi (exp, target, subtarget);
6477 gcc_assert (target);
6480 CASE_FLT_FN (BUILT_IN_SIN):
6481 CASE_FLT_FN (BUILT_IN_COS):
6482 if (! flag_unsafe_math_optimizations)
6484 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6489 CASE_FLT_FN (BUILT_IN_SINCOS):
6490 if (! flag_unsafe_math_optimizations)
6492 target = expand_builtin_sincos (exp);
6497 case BUILT_IN_APPLY_ARGS:
6498 return expand_builtin_apply_args ();
6500 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6501 FUNCTION with a copy of the parameters described by
6502 ARGUMENTS, and ARGSIZE. It returns a block of memory
6503 allocated on the stack into which is stored all the registers
6504 that might possibly be used for returning the result of a
6505 function. ARGUMENTS is the value returned by
6506 __builtin_apply_args. ARGSIZE is the number of bytes of
6507 arguments that must be copied. ??? How should this value be
6508 computed? We'll also need a safe worst case value for varargs
6510 case BUILT_IN_APPLY:
6511 if (!validate_arglist (exp, POINTER_TYPE,
6512 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6513 && !validate_arglist (exp, REFERENCE_TYPE,
6514 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6520 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6521 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6522 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6524 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6527 /* __builtin_return (RESULT) causes the function to return the
6528 value described by RESULT. RESULT is address of the block of
6529 memory returned by __builtin_apply. */
6530 case BUILT_IN_RETURN:
6531 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6532 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6535 case BUILT_IN_SAVEREGS:
6536 return expand_builtin_saveregs ();
6538 case BUILT_IN_ARGS_INFO:
6539 return expand_builtin_args_info (exp);
6541 case BUILT_IN_VA_ARG_PACK:
6542 /* All valid uses of __builtin_va_arg_pack () are removed during
6544 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6547 case BUILT_IN_VA_ARG_PACK_LEN:
6548 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6550 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6553 /* Return the address of the first anonymous stack arg. */
6554 case BUILT_IN_NEXT_ARG:
6555 if (fold_builtin_next_arg (exp, false))
6557 return expand_builtin_next_arg ();
6559 case BUILT_IN_CLEAR_CACHE:
6560 target = expand_builtin___clear_cache (exp);
6565 case BUILT_IN_CLASSIFY_TYPE:
6566 return expand_builtin_classify_type (exp);
6568 case BUILT_IN_CONSTANT_P:
6571 case BUILT_IN_FRAME_ADDRESS:
6572 case BUILT_IN_RETURN_ADDRESS:
6573 return expand_builtin_frame_address (fndecl, exp);
6575 /* Returns the address of the area where the structure is returned.
6577 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6578 if (call_expr_nargs (exp) != 0
6579 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6580 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6583 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6585 case BUILT_IN_ALLOCA:
6586 target = expand_builtin_alloca (exp, target);
6591 case BUILT_IN_STACK_SAVE:
6592 return expand_stack_save ();
6594 case BUILT_IN_STACK_RESTORE:
6595 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6598 case BUILT_IN_BSWAP32:
6599 case BUILT_IN_BSWAP64:
6600 target = expand_builtin_bswap (exp, target, subtarget);
6606 CASE_INT_FN (BUILT_IN_FFS):
6607 case BUILT_IN_FFSIMAX:
6608 target = expand_builtin_unop (target_mode, exp, target,
6609 subtarget, ffs_optab);
6614 CASE_INT_FN (BUILT_IN_CLZ):
6615 case BUILT_IN_CLZIMAX:
6616 target = expand_builtin_unop (target_mode, exp, target,
6617 subtarget, clz_optab);
6622 CASE_INT_FN (BUILT_IN_CTZ):
6623 case BUILT_IN_CTZIMAX:
6624 target = expand_builtin_unop (target_mode, exp, target,
6625 subtarget, ctz_optab);
6630 CASE_INT_FN (BUILT_IN_POPCOUNT):
6631 case BUILT_IN_POPCOUNTIMAX:
6632 target = expand_builtin_unop (target_mode, exp, target,
6633 subtarget, popcount_optab);
6638 CASE_INT_FN (BUILT_IN_PARITY):
6639 case BUILT_IN_PARITYIMAX:
6640 target = expand_builtin_unop (target_mode, exp, target,
6641 subtarget, parity_optab);
6646 case BUILT_IN_STRLEN:
6647 target = expand_builtin_strlen (exp, target, target_mode);
6652 case BUILT_IN_STRCPY:
6653 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6658 case BUILT_IN_STRNCPY:
6659 target = expand_builtin_strncpy (exp, target, mode);
6664 case BUILT_IN_STPCPY:
6665 target = expand_builtin_stpcpy (exp, target, mode);
6670 case BUILT_IN_STRCAT:
6671 target = expand_builtin_strcat (fndecl, exp, target, mode);
6676 case BUILT_IN_STRNCAT:
6677 target = expand_builtin_strncat (exp, target, mode);
6682 case BUILT_IN_STRSPN:
6683 target = expand_builtin_strspn (exp, target, mode);
6688 case BUILT_IN_STRCSPN:
6689 target = expand_builtin_strcspn (exp, target, mode);
6694 case BUILT_IN_STRSTR:
6695 target = expand_builtin_strstr (exp, target, mode);
6700 case BUILT_IN_STRPBRK:
6701 target = expand_builtin_strpbrk (exp, target, mode);
6706 case BUILT_IN_INDEX:
6707 case BUILT_IN_STRCHR:
6708 target = expand_builtin_strchr (exp, target, mode);
6713 case BUILT_IN_RINDEX:
6714 case BUILT_IN_STRRCHR:
6715 target = expand_builtin_strrchr (exp, target, mode);
6720 case BUILT_IN_MEMCPY:
6721 target = expand_builtin_memcpy (exp, target, mode);
6726 case BUILT_IN_MEMPCPY:
6727 target = expand_builtin_mempcpy (exp, target, mode);
6732 case BUILT_IN_MEMMOVE:
6733 target = expand_builtin_memmove (exp, target, mode, ignore);
6738 case BUILT_IN_BCOPY:
6739 target = expand_builtin_bcopy (exp, ignore);
6744 case BUILT_IN_MEMSET:
6745 target = expand_builtin_memset (exp, target, mode);
6750 case BUILT_IN_BZERO:
6751 target = expand_builtin_bzero (exp);
6756 case BUILT_IN_STRCMP:
6757 target = expand_builtin_strcmp (exp, target, mode);
6762 case BUILT_IN_STRNCMP:
6763 target = expand_builtin_strncmp (exp, target, mode);
6768 case BUILT_IN_MEMCHR:
6769 target = expand_builtin_memchr (exp, target, mode);
6775 case BUILT_IN_MEMCMP:
6776 target = expand_builtin_memcmp (exp, target, mode);
6781 case BUILT_IN_SETJMP:
6782 /* This should have been lowered to the builtins below. */
6785 case BUILT_IN_SETJMP_SETUP:
6786 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6787 and the receiver label. */
6788 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6790 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6791 VOIDmode, EXPAND_NORMAL);
6792 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6793 rtx label_r = label_rtx (label);
6795 /* This is copied from the handling of non-local gotos. */
6796 expand_builtin_setjmp_setup (buf_addr, label_r);
6797 nonlocal_goto_handler_labels
6798 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6799 nonlocal_goto_handler_labels);
6800 /* ??? Do not let expand_label treat us as such since we would
6801 not want to be both on the list of non-local labels and on
6802 the list of forced labels. */
6803 FORCED_LABEL (label) = 0;
6808 case BUILT_IN_SETJMP_DISPATCHER:
6809 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6810 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6812 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6813 rtx label_r = label_rtx (label);
6815 /* Remove the dispatcher label from the list of non-local labels
6816 since the receiver labels have been added to it above. */
6817 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6822 case BUILT_IN_SETJMP_RECEIVER:
6823 /* __builtin_setjmp_receiver is passed the receiver label. */
6824 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6826 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6827 rtx label_r = label_rtx (label);
6829 expand_builtin_setjmp_receiver (label_r);
6834 /* __builtin_longjmp is passed a pointer to an array of five words.
6835 It's similar to the C library longjmp function but works with
6836 __builtin_setjmp above. */
6837 case BUILT_IN_LONGJMP:
6838 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6840 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6841 VOIDmode, EXPAND_NORMAL);
6842 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6844 if (value != const1_rtx)
6846 error ("%<__builtin_longjmp%> second argument must be 1");
6850 expand_builtin_longjmp (buf_addr, value);
6855 case BUILT_IN_NONLOCAL_GOTO:
6856 target = expand_builtin_nonlocal_goto (exp);
6861 /* This updates the setjmp buffer that is its argument with the value
6862 of the current stack pointer. */
6863 case BUILT_IN_UPDATE_SETJMP_BUF:
6864 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6867 = expand_normal (CALL_EXPR_ARG (exp, 0));
6869 expand_builtin_update_setjmp_buf (buf_addr);
6875 expand_builtin_trap ();
6878 case BUILT_IN_UNREACHABLE:
6879 expand_builtin_unreachable ();
6882 case BUILT_IN_PRINTF:
6883 target = expand_builtin_printf (exp, target, mode, false);
6888 case BUILT_IN_PRINTF_UNLOCKED:
6889 target = expand_builtin_printf (exp, target, mode, true);
6894 case BUILT_IN_FPUTS:
6895 target = expand_builtin_fputs (exp, target, false);
6899 case BUILT_IN_FPUTS_UNLOCKED:
6900 target = expand_builtin_fputs (exp, target, true);
6905 case BUILT_IN_FPRINTF:
6906 target = expand_builtin_fprintf (exp, target, mode, false);
6911 case BUILT_IN_FPRINTF_UNLOCKED:
6912 target = expand_builtin_fprintf (exp, target, mode, true);
6917 case BUILT_IN_SPRINTF:
6918 target = expand_builtin_sprintf (exp, target, mode);
6923 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6924 case BUILT_IN_SIGNBITD32:
6925 case BUILT_IN_SIGNBITD64:
6926 case BUILT_IN_SIGNBITD128:
6927 target = expand_builtin_signbit (exp, target);
6932 /* Various hooks for the DWARF 2 __throw routine. */
6933 case BUILT_IN_UNWIND_INIT:
6934 expand_builtin_unwind_init ();
6936 case BUILT_IN_DWARF_CFA:
6937 return virtual_cfa_rtx;
6938 #ifdef DWARF2_UNWIND_INFO
6939 case BUILT_IN_DWARF_SP_COLUMN:
6940 return expand_builtin_dwarf_sp_column ();
6941 case BUILT_IN_INIT_DWARF_REG_SIZES:
6942 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6945 case BUILT_IN_FROB_RETURN_ADDR:
6946 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6947 case BUILT_IN_EXTRACT_RETURN_ADDR:
6948 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6949 case BUILT_IN_EH_RETURN:
6950 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6951 CALL_EXPR_ARG (exp, 1));
6953 #ifdef EH_RETURN_DATA_REGNO
6954 case BUILT_IN_EH_RETURN_DATA_REGNO:
6955 return expand_builtin_eh_return_data_regno (exp);
6957 case BUILT_IN_EXTEND_POINTER:
6958 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6959 case BUILT_IN_EH_POINTER:
6960 return expand_builtin_eh_pointer (exp);
6961 case BUILT_IN_EH_FILTER:
6962 return expand_builtin_eh_filter (exp);
6963 case BUILT_IN_EH_COPY_VALUES:
6964 return expand_builtin_eh_copy_values (exp);
6966 case BUILT_IN_VA_START:
6967 return expand_builtin_va_start (exp);
6968 case BUILT_IN_VA_END:
6969 return expand_builtin_va_end (exp);
6970 case BUILT_IN_VA_COPY:
6971 return expand_builtin_va_copy (exp);
6972 case BUILT_IN_EXPECT:
6973 return expand_builtin_expect (exp, target);
6974 case BUILT_IN_PREFETCH:
6975 expand_builtin_prefetch (exp);
6978 case BUILT_IN_PROFILE_FUNC_ENTER:
6979 return expand_builtin_profile_func (false);
6980 case BUILT_IN_PROFILE_FUNC_EXIT:
6981 return expand_builtin_profile_func (true);
6983 case BUILT_IN_INIT_TRAMPOLINE:
6984 return expand_builtin_init_trampoline (exp);
6985 case BUILT_IN_ADJUST_TRAMPOLINE:
6986 return expand_builtin_adjust_trampoline (exp);
6989 case BUILT_IN_EXECL:
6990 case BUILT_IN_EXECV:
6991 case BUILT_IN_EXECLP:
6992 case BUILT_IN_EXECLE:
6993 case BUILT_IN_EXECVP:
6994 case BUILT_IN_EXECVE:
6995 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7000 case BUILT_IN_FETCH_AND_ADD_1:
7001 case BUILT_IN_FETCH_AND_ADD_2:
7002 case BUILT_IN_FETCH_AND_ADD_4:
7003 case BUILT_IN_FETCH_AND_ADD_8:
7004 case BUILT_IN_FETCH_AND_ADD_16:
7005 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
7006 target = expand_builtin_sync_operation (mode, exp, PLUS,
7007 false, target, ignore);
7012 case BUILT_IN_FETCH_AND_SUB_1:
7013 case BUILT_IN_FETCH_AND_SUB_2:
7014 case BUILT_IN_FETCH_AND_SUB_4:
7015 case BUILT_IN_FETCH_AND_SUB_8:
7016 case BUILT_IN_FETCH_AND_SUB_16:
7017 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
7018 target = expand_builtin_sync_operation (mode, exp, MINUS,
7019 false, target, ignore);
7024 case BUILT_IN_FETCH_AND_OR_1:
7025 case BUILT_IN_FETCH_AND_OR_2:
7026 case BUILT_IN_FETCH_AND_OR_4:
7027 case BUILT_IN_FETCH_AND_OR_8:
7028 case BUILT_IN_FETCH_AND_OR_16:
7029 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
7030 target = expand_builtin_sync_operation (mode, exp, IOR,
7031 false, target, ignore);
7036 case BUILT_IN_FETCH_AND_AND_1:
7037 case BUILT_IN_FETCH_AND_AND_2:
7038 case BUILT_IN_FETCH_AND_AND_4:
7039 case BUILT_IN_FETCH_AND_AND_8:
7040 case BUILT_IN_FETCH_AND_AND_16:
7041 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
7042 target = expand_builtin_sync_operation (mode, exp, AND,
7043 false, target, ignore);
7048 case BUILT_IN_FETCH_AND_XOR_1:
7049 case BUILT_IN_FETCH_AND_XOR_2:
7050 case BUILT_IN_FETCH_AND_XOR_4:
7051 case BUILT_IN_FETCH_AND_XOR_8:
7052 case BUILT_IN_FETCH_AND_XOR_16:
7053 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
7054 target = expand_builtin_sync_operation (mode, exp, XOR,
7055 false, target, ignore);
7060 case BUILT_IN_FETCH_AND_NAND_1:
7061 case BUILT_IN_FETCH_AND_NAND_2:
7062 case BUILT_IN_FETCH_AND_NAND_4:
7063 case BUILT_IN_FETCH_AND_NAND_8:
7064 case BUILT_IN_FETCH_AND_NAND_16:
7065 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
7066 target = expand_builtin_sync_operation (mode, exp, NOT,
7067 false, target, ignore);
7072 case BUILT_IN_ADD_AND_FETCH_1:
7073 case BUILT_IN_ADD_AND_FETCH_2:
7074 case BUILT_IN_ADD_AND_FETCH_4:
7075 case BUILT_IN_ADD_AND_FETCH_8:
7076 case BUILT_IN_ADD_AND_FETCH_16:
7077 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
7078 target = expand_builtin_sync_operation (mode, exp, PLUS,
7079 true, target, ignore);
7084 case BUILT_IN_SUB_AND_FETCH_1:
7085 case BUILT_IN_SUB_AND_FETCH_2:
7086 case BUILT_IN_SUB_AND_FETCH_4:
7087 case BUILT_IN_SUB_AND_FETCH_8:
7088 case BUILT_IN_SUB_AND_FETCH_16:
7089 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7090 target = expand_builtin_sync_operation (mode, exp, MINUS,
7091 true, target, ignore);
7096 case BUILT_IN_OR_AND_FETCH_1:
7097 case BUILT_IN_OR_AND_FETCH_2:
7098 case BUILT_IN_OR_AND_FETCH_4:
7099 case BUILT_IN_OR_AND_FETCH_8:
7100 case BUILT_IN_OR_AND_FETCH_16:
7101 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7102 target = expand_builtin_sync_operation (mode, exp, IOR,
7103 true, target, ignore);
7108 case BUILT_IN_AND_AND_FETCH_1:
7109 case BUILT_IN_AND_AND_FETCH_2:
7110 case BUILT_IN_AND_AND_FETCH_4:
7111 case BUILT_IN_AND_AND_FETCH_8:
7112 case BUILT_IN_AND_AND_FETCH_16:
7113 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7114 target = expand_builtin_sync_operation (mode, exp, AND,
7115 true, target, ignore);
7120 case BUILT_IN_XOR_AND_FETCH_1:
7121 case BUILT_IN_XOR_AND_FETCH_2:
7122 case BUILT_IN_XOR_AND_FETCH_4:
7123 case BUILT_IN_XOR_AND_FETCH_8:
7124 case BUILT_IN_XOR_AND_FETCH_16:
7125 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7126 target = expand_builtin_sync_operation (mode, exp, XOR,
7127 true, target, ignore);
7132 case BUILT_IN_NAND_AND_FETCH_1:
7133 case BUILT_IN_NAND_AND_FETCH_2:
7134 case BUILT_IN_NAND_AND_FETCH_4:
7135 case BUILT_IN_NAND_AND_FETCH_8:
7136 case BUILT_IN_NAND_AND_FETCH_16:
7137 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7138 target = expand_builtin_sync_operation (mode, exp, NOT,
7139 true, target, ignore);
7144 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7145 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7146 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7147 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7148 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7149 if (mode == VOIDmode)
7150 mode = TYPE_MODE (boolean_type_node);
7151 if (!target || !register_operand (target, mode))
7152 target = gen_reg_rtx (mode);
7154 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7155 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7160 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7161 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7162 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7163 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7164 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7165 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7166 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7171 case BUILT_IN_LOCK_TEST_AND_SET_1:
7172 case BUILT_IN_LOCK_TEST_AND_SET_2:
7173 case BUILT_IN_LOCK_TEST_AND_SET_4:
7174 case BUILT_IN_LOCK_TEST_AND_SET_8:
7175 case BUILT_IN_LOCK_TEST_AND_SET_16:
7176 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7177 target = expand_builtin_lock_test_and_set (mode, exp, target);
7182 case BUILT_IN_LOCK_RELEASE_1:
7183 case BUILT_IN_LOCK_RELEASE_2:
7184 case BUILT_IN_LOCK_RELEASE_4:
7185 case BUILT_IN_LOCK_RELEASE_8:
7186 case BUILT_IN_LOCK_RELEASE_16:
7187 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7188 expand_builtin_lock_release (mode, exp);
7191 case BUILT_IN_SYNCHRONIZE:
7192 expand_builtin_synchronize ();
7195 case BUILT_IN_OBJECT_SIZE:
7196 return expand_builtin_object_size (exp);
7198 case BUILT_IN_MEMCPY_CHK:
7199 case BUILT_IN_MEMPCPY_CHK:
7200 case BUILT_IN_MEMMOVE_CHK:
7201 case BUILT_IN_MEMSET_CHK:
7202 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7207 case BUILT_IN_STRCPY_CHK:
7208 case BUILT_IN_STPCPY_CHK:
7209 case BUILT_IN_STRNCPY_CHK:
7210 case BUILT_IN_STRCAT_CHK:
7211 case BUILT_IN_STRNCAT_CHK:
7212 case BUILT_IN_SNPRINTF_CHK:
7213 case BUILT_IN_VSNPRINTF_CHK:
7214 maybe_emit_chk_warning (exp, fcode);
7217 case BUILT_IN_SPRINTF_CHK:
7218 case BUILT_IN_VSPRINTF_CHK:
7219 maybe_emit_sprintf_chk_warning (exp, fcode);
7223 maybe_emit_free_warning (exp);
7226 default: /* just do library call, if unknown builtin */
7230 /* The switch statement above can drop through to cause the function
7231 to be called normally. */
7232 return expand_call (exp, target, ignore);
7235 /* Determine whether a tree node represents a call to a built-in
7236 function. If the tree T is a call to a built-in function with
7237 the right number of arguments of the appropriate types, return
7238 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7239 Otherwise the return value is END_BUILTINS. */
7241 enum built_in_function
7242 builtin_mathfn_code (const_tree t)
7244 const_tree fndecl, arg, parmlist;
7245 const_tree argtype, parmtype;
7246 const_call_expr_arg_iterator iter;
/* Only a direct call through an ADDR_EXPR of the callee can match. */
7248 if (TREE_CODE (t) != CALL_EXPR
7249 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7250 return END_BUILTINS;
7252 fndecl = get_callee_fndecl (t);
7253 if (fndecl == NULL_TREE
7254 || TREE_CODE (fndecl) != FUNCTION_DECL
7255 || ! DECL_BUILT_IN (fndecl)
7256 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7257 return END_BUILTINS;
/* Walk the declared parameter types in parallel with the actual call
 arguments; each argument must agree in type class (scalar float,
 complex float, pointer, integral) with its parameter. */
7259 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7260 init_const_call_expr_arg_iterator (t, &iter);
7261 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7263 /* If a function doesn't take a variable number of arguments,
7264 the last element in the list will have type `void'. */
7265 parmtype = TREE_VALUE (parmlist);
7266 if (VOID_TYPE_P (parmtype))
/* Excess actual arguments beyond the declared list disqualify T. */
7268 if (more_const_call_expr_args_p (&iter))
7269 return END_BUILTINS;
7270 return DECL_FUNCTION_CODE (fndecl);
/* Too few actual arguments also disqualify T. */
7273 if (! more_const_call_expr_args_p (&iter))
7274 return END_BUILTINS;
7276 arg = next_const_call_expr_arg (&iter);
7277 argtype = TREE_TYPE (arg);
7279 if (SCALAR_FLOAT_TYPE_P (parmtype))
7281 if (! SCALAR_FLOAT_TYPE_P (argtype))
7282 return END_BUILTINS;
7284 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7286 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7287 return END_BUILTINS;
7289 else if (POINTER_TYPE_P (parmtype))
7291 if (! POINTER_TYPE_P (argtype))
7292 return END_BUILTINS;
7294 else if (INTEGRAL_TYPE_P (parmtype))
7296 if (! INTEGRAL_TYPE_P (argtype))
7297 return END_BUILTINS;
/* Any other parameter type class is not recognized. */
7300 return END_BUILTINS;
7303 /* Variable-length argument list. */
7304 return DECL_FUNCTION_CODE (fndecl);
7307 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7308 evaluate to a constant. */
7311 fold_builtin_constant_p (tree arg)
7313 /* We return 1 for a numeric type that's known to be a constant
7314 value at compile-time or for an aggregate type that's a
7315 literal constant. */
7318 /* If we know this is a constant, emit the constant of one. */
7319 if (CONSTANT_CLASS_P (arg)
7320 || (TREE_CODE (arg) == CONSTRUCTOR
7321 && TREE_CONSTANT (arg)))
7322 return integer_one_node;
/* The address of a string literal, or of its first element, counts
 as a literal constant as well. */
7323 if (TREE_CODE (arg) == ADDR_EXPR)
7325 tree op = TREE_OPERAND (arg, 0);
7326 if (TREE_CODE (op) == STRING_CST
7327 || (TREE_CODE (op) == ARRAY_REF
7328 && integer_zerop (TREE_OPERAND (op, 1))
7329 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7330 return integer_one_node;
7333 /* If this expression has side effects, show we don't know it to be a
7334 constant. Likewise if it's a pointer or aggregate type since in
7335 those cases we only want literals, since those are only optimized
7336 when generating RTL, not later.
7337 And finally, if we are compiling an initializer, not code, we
7338 need to return a definite result now; there's not going to be any
7339 more optimization done. */
7340 if (TREE_SIDE_EFFECTS (arg)
7341 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7342 || POINTER_TYPE_P (TREE_TYPE (arg))
7344 || folding_initializer)
7345 return integer_zero_node;
7350 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7351 return it as a truthvalue. */
7354 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7356 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7358 fn = built_in_decls[BUILT_IN_EXPECT];
7359 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7360 ret_type = TREE_TYPE (TREE_TYPE (fn));
7361 pred_type = TREE_VALUE (arg_types);
7362 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
/* Coerce PRED and EXPECTED to the declared parameter types of
 __builtin_expect before building the call. */
7364 pred = fold_convert_loc (loc, pred_type, pred);
7365 expected = fold_convert_loc (loc, expected_type, expected);
7366 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
/* The truthvalue is the expect call compared against zero. */
7368 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7369 build_int_cst (ret_type, 0));
7372 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7373 NULL_TREE if no simplification is possible. */
7376 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7379 enum tree_code code;
7381 /* If this is a builtin_expect within a builtin_expect keep the
7382 inner one. See through a comparison against a constant. It
7383 might have been added to create a truthvalue. */
7385 if (COMPARISON_CLASS_P (inner)
7386 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7387 inner = TREE_OPERAND (inner, 0);
7389 if (TREE_CODE (inner) == CALL_EXPR
7390 && (fndecl = get_callee_fndecl (inner))
7391 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7392 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7395 /* Distribute the expected value over short-circuiting operators.
7396 See through the cast from truthvalue_type_node to long. */
7398 while (TREE_CODE (inner) == NOP_EXPR
7399 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7400 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7401 inner = TREE_OPERAND (inner, 0);
7403 code = TREE_CODE (inner);
7404 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7406 tree op0 = TREE_OPERAND (inner, 0);
7407 tree op1 = TREE_OPERAND (inner, 1);
/* Apply the expectation to each operand, rebuild the short-circuit
 expression, and convert back to ARG0's type. */
7409 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7410 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7411 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7413 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7416 /* If the argument isn't invariant then there's nothing else we can do. */
7417 if (!TREE_CONSTANT (arg0))
7420 /* If we expect that a comparison against the argument will fold to
7421 a constant return the constant. In practice, this means a true
7422 constant or the address of a non-weak symbol. */
7425 if (TREE_CODE (inner) == ADDR_EXPR)
/* Strip component and array references to reach the underlying decl;
 a weak symbol's address is not a usable compile-time constant. */
7429 inner = TREE_OPERAND (inner, 0);
7431 while (TREE_CODE (inner) == COMPONENT_REF
7432 || TREE_CODE (inner) == ARRAY_REF);
7433 if ((TREE_CODE (inner) == VAR_DECL
7434 || TREE_CODE (inner) == FUNCTION_DECL)
7435 && DECL_WEAK (inner))
7439 /* Otherwise, ARG0 already has the proper type for the return value. */
7443 /* Fold a call to __builtin_classify_type with argument ARG. */
7446 fold_builtin_classify_type (tree arg)
/* NOTE(review): the first return presumably sits under a null-ARG
 guard that is not visible in this extract — a missing argument
 classifies as no_type_class; confirm against the full source. */
7449 return build_int_cst (NULL_TREE, no_type_class);
7451 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7454 /* Fold a call to __builtin_strlen with argument ARG. */
7457 fold_builtin_strlen (location_t loc, tree arg)
7459 if (!validate_arg (arg, POINTER_TYPE))
/* c_strlen computes the string length at compile time when ARG
 folds to a constant string; NULL_TREE otherwise. */
7463 tree len = c_strlen (arg, 0);
7467 /* Convert from the internal "sizetype" type to "size_t". */
7469 len = fold_convert_loc (loc, size_type_node, len);
7477 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7480 fold_builtin_inf (location_t loc, tree type, int warn)
7482 REAL_VALUE_TYPE real;
7484 /* __builtin_inff is intended to be usable to define INFINITY on all
7485 targets. If an infinity is not available, INFINITY expands "to a
7486 positive constant of type float that overflows at translation
7487 time", footnote "In this case, using INFINITY will violate the
7488 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7489 Thus we pedwarn to ensure this constraint violation is
7491 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7492 pedwarn (loc, 0, "target format does not support infinity");
/* NOTE(review): REAL is initialized to infinity by a call not
 visible in this extract (presumably real_inf) — verify against
 the full source. */
7495 return build_real (type, real);
7498 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7501 fold_builtin_nan (tree arg, tree type, int quiet)
7503 REAL_VALUE_TYPE real;
7506 if (!validate_arg (arg, POINTER_TYPE))
7508 str = c_getstr (arg);
/* Parse STR as the NaN payload; QUIET distinguishes __builtin_nan
 (quiet) from __builtin_nans (signaling). Fail if unparsable. */
7512 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7515 return build_real (type, real);
7518 /* Return true if the floating point expression T has an integer value.
7519 We also allow +Inf, -Inf and NaN to be considered integer values. */
7522 integer_valued_real_p (tree t)
7524 switch (TREE_CODE (t))
7531 return integer_valued_real_p (TREE_OPERAND (t, 0));
7536 return integer_valued_real_p (TREE_OPERAND (t, 1));
/* Binary operations are integer valued when both operands are. */
7543 return integer_valued_real_p (TREE_OPERAND (t, 0))
7544 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* A conditional is integer valued when both of its arms are. */
7547 return integer_valued_real_p (TREE_OPERAND (t, 1))
7548 && integer_valued_real_p (TREE_OPERAND (t, 2));
7551 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
/* A conversion from an integer type, or from an integer-valued
 real, is integer valued. */
7555 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7556 if (TREE_CODE (type) == INTEGER_TYPE)
7558 if (TREE_CODE (type) == REAL_TYPE)
7559 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* Rounding builtins always yield integer values; fmin/fmax of
 integer-valued operands remain integer valued. */
7564 switch (builtin_mathfn_code (t))
7566 CASE_FLT_FN (BUILT_IN_CEIL):
7567 CASE_FLT_FN (BUILT_IN_FLOOR):
7568 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7569 CASE_FLT_FN (BUILT_IN_RINT):
7570 CASE_FLT_FN (BUILT_IN_ROUND):
7571 CASE_FLT_FN (BUILT_IN_TRUNC):
7574 CASE_FLT_FN (BUILT_IN_FMIN):
7575 CASE_FLT_FN (BUILT_IN_FMAX):
7576 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7577 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7590 /* FNDECL is assumed to be a builtin where truncation can be propagated
7591 across (for instance floor((double)f) == (double)floorf (f)).
7592 Do the transformation for a call with argument ARG. */
7595 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7597 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7599 if (!validate_arg (arg, REAL_TYPE))
7602 /* Integer rounding functions are idempotent. */
7603 if (fcode == builtin_mathfn_code (arg))
7606 /* If argument is already integer valued, and we don't need to worry
7607 about setting errno, there's no need to perform rounding. */
7608 if (! flag_errno_math && integer_valued_real_p (arg))
/* If ARG was promoted from a narrower float type, perform the
 rounding in the narrower type and widen the result instead. */
7613 tree arg0 = strip_float_extensions (arg);
7614 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7615 tree newtype = TREE_TYPE (arg0);
7618 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7619 && (decl = mathfn_built_in (newtype, fcode)))
7620 return fold_convert_loc (loc, ftype,
7621 build_call_expr_loc (loc, decl, 1,
7622 fold_convert_loc (loc,
7629 /* FNDECL is assumed to be builtin which can narrow the FP type of
7630 the argument, for instance lround((double)f) -> lroundf (f).
7631 Do the transformation for a call with argument ARG. */
7634 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7636 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7638 if (!validate_arg (arg, REAL_TYPE))
7641 /* If argument is already integer valued, and we don't need to worry
7642 about setting errno, there's no need to perform rounding. */
7643 if (! flag_errno_math && integer_valued_real_p (arg))
7644 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7645 TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* If ARG was promoted from a narrower float type, call the
 narrower-typed variant of the builtin instead. */
7649 tree ftype = TREE_TYPE (arg);
7650 tree arg0 = strip_float_extensions (arg);
7651 tree newtype = TREE_TYPE (arg0);
7654 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7655 && (decl = mathfn_built_in (newtype, fcode)))
7656 return build_call_expr_loc (loc, decl, 1,
7657 fold_convert_loc (loc, newtype, arg0));
7660 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7661 sizeof (long long) == sizeof (long). */
7662 if (TYPE_PRECISION (long_long_integer_type_node)
7663 == TYPE_PRECISION (long_integer_type_node))
7665 tree newfn = NULL_TREE;
7668 CASE_FLT_FN (BUILT_IN_LLCEIL):
7669 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7672 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7673 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7676 CASE_FLT_FN (BUILT_IN_LLROUND):
7677 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7680 CASE_FLT_FN (BUILT_IN_LLRINT):
7681 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
/* Call the "long" variant and convert its result back to the
 original "long long" return type. */
7690 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7691 return fold_convert_loc (loc,
7692 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7699 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7700 return type. Return NULL_TREE if no simplification can be made. */
7703 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7707 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7708 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7711 /* Calculate the result when the argument is a constant. */
7712 if (TREE_CODE (arg) == COMPLEX_CST
7713 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7717 if (TREE_CODE (arg) == COMPLEX_EXPR)
7719 tree real = TREE_OPERAND (arg, 0);
7720 tree imag = TREE_OPERAND (arg, 1);
7722 /* If either part is zero, cabs is fabs of the other. */
7723 if (real_zerop (real))
7724 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7725 if (real_zerop (imag))
7726 return fold_build1_loc (loc, ABS_EXPR, type, real);
7728 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7729 if (flag_unsafe_math_optimizations
7730 && operand_equal_p (real, imag, OEP_PURE_SAME))
7732 const REAL_VALUE_TYPE sqrt2_trunc
7733 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7735 return fold_build2_loc (loc, MULT_EXPR, type,
7736 fold_build1_loc (loc, ABS_EXPR, type, real),
7737 build_real (type, sqrt2_trunc));
7741 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7742 if (TREE_CODE (arg) == NEGATE_EXPR
7743 || TREE_CODE (arg) == CONJ_EXPR)
7744 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7746 /* Don't do this when optimizing for size. */
7747 if (flag_unsafe_math_optimizations
7748 && optimize && optimize_function_for_speed_p (cfun))
7750 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7752 if (sqrtfn != NULL_TREE)
/* Expand cabs(z) inline as sqrt(re*re + im*im); save the parts
 in temporaries so each is evaluated only once. */
7754 tree rpart, ipart, result;
7756 arg = builtin_save_expr (arg);
7758 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7759 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7761 rpart = builtin_save_expr (rpart);
7762 ipart = builtin_save_expr (ipart);
7764 result = fold_build2_loc (loc, PLUS_EXPR, type,
7765 fold_build2_loc (loc, MULT_EXPR, type,
7767 fold_build2_loc (loc, MULT_EXPR, type,
7770 return build_call_expr_loc (loc, sqrtfn, 1, result);
7777 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7778 Return NULL_TREE if no simplification can be made. */
7781 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7784 enum built_in_function fcode;
7787 if (!validate_arg (arg, REAL_TYPE))
7790 /* Calculate the result when the argument is a constant. */
7791 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7794 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7795 fcode = builtin_mathfn_code (arg);
7796 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7798 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7799 arg = fold_build2_loc (loc, MULT_EXPR, type,
7800 CALL_EXPR_ARG (arg, 0),
7801 build_real (type, dconsthalf));
7802 return build_call_expr_loc (loc, expfn, 1, arg);
7805 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7806 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7808 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7812 tree arg0 = CALL_EXPR_ARG (arg, 0);
7814 /* The inner root was either sqrt or cbrt. */
7815 /* This was a conditional expression but it triggered a bug
7817 REAL_VALUE_TYPE dconstroot;
7818 if (BUILTIN_SQRT_P (fcode))
7819 dconstroot = dconsthalf;
7821 dconstroot = dconst_third ();
7823 /* Adjust for the outer root. */
/* Halving the exponent of the real constant divides it by two,
 turning 1/N into 1/(2*N). */
7824 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7825 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7826 tree_root = build_real (type, dconstroot);
7827 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7831 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7832 if (flag_unsafe_math_optimizations
7833 && (fcode == BUILT_IN_POW
7834 || fcode == BUILT_IN_POWF
7835 || fcode == BUILT_IN_POWL))
7837 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7838 tree arg0 = CALL_EXPR_ARG (arg, 0);
7839 tree arg1 = CALL_EXPR_ARG (arg, 1);
/* Force a nonnegative base unless it is already known so, since
 the halved exponent changes how a negative base behaves. */
7841 if (!tree_expr_nonnegative_p (arg0))
7842 arg0 = build1 (ABS_EXPR, type, arg0);
7843 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7844 build_real (type, dconsthalf));
7845 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7851 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7852 Return NULL_TREE if no simplification can be made. */
7855 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7857 const enum built_in_function fcode = builtin_mathfn_code (arg);
7860 if (!validate_arg (arg, REAL_TYPE))
7863 /* Calculate the result when the argument is a constant. */
7864 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7867 if (flag_unsafe_math_optimizations)
7869 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7870 if (BUILTIN_EXPONENT_P (fcode))
7872 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7873 const REAL_VALUE_TYPE third_trunc =
7874 real_value_truncate (TYPE_MODE (type), dconst_third ());
7875 arg = fold_build2_loc (loc, MULT_EXPR, type,
7876 CALL_EXPR_ARG (arg, 0),
7877 build_real (type, third_trunc));
7878 return build_call_expr_loc (loc, expfn, 1, arg);
7881 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7882 if (BUILTIN_SQRT_P (fcode))
7884 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7888 tree arg0 = CALL_EXPR_ARG (arg, 0);
7890 REAL_VALUE_TYPE dconstroot = dconst_third ();
/* Halve 1/3 by decrementing the exponent, giving 1/6. */
7892 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7893 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7894 tree_root = build_real (type, dconstroot);
7895 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7899 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7900 if (BUILTIN_CBRT_P (fcode))
7902 tree arg0 = CALL_EXPR_ARG (arg, 0);
7903 if (tree_expr_nonnegative_p (arg0))
7905 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7910 REAL_VALUE_TYPE dconstroot;
/* 1/9 == (1/3) * (1/3). */
7912 real_arithmetic (&dconstroot, MULT_EXPR,
7913 dconst_third_ptr (), dconst_third_ptr ());
7914 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7915 tree_root = build_real (type, dconstroot);
7916 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7921 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7922 if (fcode == BUILT_IN_POW
7923 || fcode == BUILT_IN_POWF
7924 || fcode == BUILT_IN_POWL)
7926 tree arg00 = CALL_EXPR_ARG (arg, 0);
7927 tree arg01 = CALL_EXPR_ARG (arg, 1);
7928 if (tree_expr_nonnegative_p (arg00))
7930 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7931 const REAL_VALUE_TYPE dconstroot
7932 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7933 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7934 build_real (type, dconstroot));
7935 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7942 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7943 TYPE is the type of the return value. Return NULL_TREE if no
7944 simplification can be made. */
7947 fold_builtin_cos (location_t loc,
7948 tree arg, tree type, tree fndecl)
7952 if (!validate_arg (arg, REAL_TYPE))
7955 /* Calculate the result when the argument is a constant. */
7956 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7959 /* Optimize cos(-x) into cos (x). */
/* cos is even, so negations/fabs on the argument are irrelevant;
   fold_strip_sign_ops returns a stripped copy or NULL if nothing changed. */
7960 if ((narg = fold_strip_sign_ops (arg)))
7961 return build_call_expr_loc (loc, fndecl, 1, narg);
7966 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7967 Return NULL_TREE if no simplification can be made. */
7970 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7972 if (validate_arg (arg, REAL_TYPE))
7976 /* Calculate the result when the argument is a constant. */
7977 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7980 /* Optimize cosh(-x) into cosh (x). */
/* cosh is even, same sign-stripping rationale as fold_builtin_cos. */
7981 if ((narg = fold_strip_sign_ops (arg)))
7982 return build_call_expr_loc (loc, fndecl, 1, narg);
7988 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7989 argument ARG. TYPE is the type of the return value. Return
7990 NULL_TREE if no simplification can be made. */
7993 fold_builtin_ccos (location_t loc,
/* NOTE(review): TYPE and HYPER are marked ATTRIBUTE_UNUSED yet both are
   referenced below; presumably the attribute covers configurations where
   the mpc-based path is compiled out — confirm against the full source. */
7994 tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
7995 bool hyper ATTRIBUTE_UNUSED)
7997 if (validate_arg (arg, COMPLEX_TYPE)
7998 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8003 /* Calculate the result when the argument is a constant. */
8004 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8008 /* Optimize fn(-x) into fn(x). */
8009 if ((tmp = fold_strip_sign_ops (arg)))
8010 return build_call_expr_loc (loc, fndecl, 1, tmp);
8016 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8017 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): unlike the sibling folders this routine takes no
   location_t parameter. */
8020 fold_builtin_tan (tree arg, tree type)
8022 enum built_in_function fcode;
8025 if (!validate_arg (arg, REAL_TYPE))
8028 /* Calculate the result when the argument is a constant. */
8029 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8032 /* Optimize tan(atan(x)) = x. */
8033 fcode = builtin_mathfn_code (arg);
8034 if (flag_unsafe_math_optimizations
8035 && (fcode == BUILT_IN_ATAN
8036 || fcode == BUILT_IN_ATANF
8037 || fcode == BUILT_IN_ATANL))
8038 return CALL_EXPR_ARG (arg, 0);
8043 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8044 NULL_TREE if no simplification can be made. */
8047 fold_builtin_sincos (location_t loc,
8048 tree arg0, tree arg1, tree arg2)
8053 if (!validate_arg (arg0, REAL_TYPE)
8054 || !validate_arg (arg1, POINTER_TYPE)
8055 || !validate_arg (arg2, POINTER_TYPE))
8058 type = TREE_TYPE (arg0);
8060 /* Calculate the result when the argument is a constant. */
8061 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8064 /* Canonicalize sincos to cexpi. */
/* NOTE(review): the statement guarded by this test is elided in this
   listing — presumably an early bail-out when cexpi is unavailable;
   confirm against the full source. */
8065 if (!TARGET_C99_FUNCTIONS)
8067 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8071 call = build_call_expr_loc (loc, fn, 1, arg0);
/* Wrap the cexpi call so it is evaluated once but used twice below. */
8072 call = builtin_save_expr (call);
/* *arg1 receives the imaginary part (sin) and *arg2 the real part (cos)
   of cexpi(arg0); the COMPOUND_EXPR sequences the two stores. */
8074 return build2 (COMPOUND_EXPR, void_type_node,
8075 build2 (MODIFY_EXPR, void_type_node,
8076 build_fold_indirect_ref_loc (loc, arg1),
8077 build1 (IMAGPART_EXPR, type, call)),
8078 build2 (MODIFY_EXPR, void_type_node,
8079 build_fold_indirect_ref_loc (loc, arg2),
8080 build1 (REALPART_EXPR, type, call)));
8083 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8084 NULL_TREE if no simplification can be made. */
8087 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8090 tree realp, imagp, ifn;
/* NOTE(review): this guard looks inverted relative to the sibling folders
   (compare fold_builtin_ccos): one would expect
   "|| TREE_CODE (...) != REAL_TYPE" for a bail-out.  As written the
   condition is almost never true.  Flagged for confirmation against the
   upstream history rather than changed here. */
8095 if (!validate_arg (arg0, COMPLEX_TYPE)
8096 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8100 /* Calculate the result when the argument is a constant. */
8101 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
/* RTYPE is the scalar component type of the complex argument. */
8105 rtype = TREE_TYPE (TREE_TYPE (arg0));
8107 /* In case we can figure out the real part of arg0 and it is constant zero
8109 if (!TARGET_C99_FUNCTIONS)
8111 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8115 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8116 && real_zerop (realp))
8118 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8119 return build_call_expr_loc (loc, ifn, 1, narg);
8122 /* In case we can easily decompose real and imaginary parts split cexp
8123 to exp (r) * cexpi (i). */
8124 if (flag_unsafe_math_optimizations
8127 tree rfn, rcall, icall;
8129 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8133 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
/* Save both sub-calls so each is evaluated only once below. */
8137 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8138 icall = builtin_save_expr (icall);
8139 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8140 rcall = builtin_save_expr (rcall);
/* Rebuild the complex result as exp(r)*real(cexpi(i)) +
   exp(r)*imag(cexpi(i)) * I. */
8141 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8142 fold_build2_loc (loc, MULT_EXPR, rtype,
8144 fold_build1_loc (loc, REALPART_EXPR,
8146 fold_build2_loc (loc, MULT_EXPR, rtype,
8148 fold_build1_loc (loc, IMAGPART_EXPR,
8155 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8156 Return NULL_TREE if no simplification can be made. */
8159 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8161 if (!validate_arg (arg, REAL_TYPE))
8164 /* Optimize trunc of constant value. */
/* NOTE: unlike floor/ceil/round below, no NaN/flag_errno_math guard is
   applied before folding the constant here. */
8165 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8167 REAL_VALUE_TYPE r, x;
8168 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8170 x = TREE_REAL_CST (arg);
8171 real_trunc (&r, TYPE_MODE (type), &x);
8172 return build_real (type, r);
/* Non-constant argument: fall back to the generic handling shared by the
   truncation-like math functions. */
8175 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8178 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8179 Return NULL_TREE if no simplification can be made. */
8182 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8184 if (!validate_arg (arg, REAL_TYPE))
8187 /* Optimize floor of constant value. */
8188 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8192 x = TREE_REAL_CST (arg);
/* Do not fold a NaN argument when errno semantics must be preserved. */
8193 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8195 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8198 real_floor (&r, TYPE_MODE (type), &x);
8199 return build_real (type, r);
8203 /* Fold floor (x) where x is nonnegative to trunc (x). */
8204 if (tree_expr_nonnegative_p (arg))
8206 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8208 return build_call_expr_loc (loc, truncfn, 1, arg);
8211 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8214 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8215 Return NULL_TREE if no simplification can be made. */
8218 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8220 if (!validate_arg (arg, REAL_TYPE))
8223 /* Optimize ceil of constant value. */
8224 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8228 x = TREE_REAL_CST (arg);
/* Do not fold a NaN argument when errno semantics must be preserved. */
8229 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8231 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8234 real_ceil (&r, TYPE_MODE (type), &x);
8235 return build_real (type, r);
8239 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8242 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8243 Return NULL_TREE if no simplification can be made. */
8246 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8248 if (!validate_arg (arg, REAL_TYPE))
8251 /* Optimize round of constant value. */
8252 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8256 x = TREE_REAL_CST (arg);
/* Do not fold a NaN argument when errno semantics must be preserved. */
8257 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8259 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8262 real_round (&r, TYPE_MODE (type), &x);
8263 return build_real (type, r);
8267 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8270 /* Fold function call to builtin lround, lroundf or lroundl (or the
8271 corresponding long long versions) and other rounding functions. ARG
8272 is the argument to the call. Return NULL_TREE if no simplification
8276 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8278 if (!validate_arg (arg, REAL_TYPE))
8281 /* Optimize lround of constant value. */
8282 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8284 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* Only finite constants can be converted to an integer result. */
8286 if (real_isfinite (&x))
8288 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8289 tree ftype = TREE_TYPE (arg);
8290 unsigned HOST_WIDE_INT lo2;
8291 HOST_WIDE_INT hi, lo;
/* Pick the rounding mode matching the specific builtin. */
8294 switch (DECL_FUNCTION_CODE (fndecl))
8296 CASE_FLT_FN (BUILT_IN_LFLOOR):
8297 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8298 real_floor (&r, TYPE_MODE (ftype), &x);
8301 CASE_FLT_FN (BUILT_IN_LCEIL):
8302 CASE_FLT_FN (BUILT_IN_LLCEIL):
8303 real_ceil (&r, TYPE_MODE (ftype), &x);
8306 CASE_FLT_FN (BUILT_IN_LROUND):
8307 CASE_FLT_FN (BUILT_IN_LLROUND):
8308 real_round (&r, TYPE_MODE (ftype), &x);
8315 REAL_VALUE_TO_INT (&lo, &hi, r);
/* Only fold when the rounded value fits the integer result type;
   fit_double_type presumably reports overflow via its return value —
   confirm its contract in the full source. */
8316 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8317 return build_int_cst_wide (itype, lo2, hi);
8321 switch (DECL_FUNCTION_CODE (fndecl))
8323 CASE_FLT_FN (BUILT_IN_LFLOOR):
8324 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8325 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8326 if (tree_expr_nonnegative_p (arg))
8327 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8328 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8333 return fold_fixed_mathfn (loc, fndecl, arg);
8336 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8337 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8338 the argument to the call. Return NULL_TREE if no simplification can
8342 fold_builtin_bitop (tree fndecl, tree arg)
8344 if (!validate_arg (arg, INTEGER_TYPE))
8347 /* Optimize for constant argument. */
8348 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8350 HOST_WIDE_INT hi, width, result;
8351 unsigned HOST_WIDE_INT lo;
8354 type = TREE_TYPE (arg);
8355 width = TYPE_PRECISION (type);
/* The constant is held as a LO/HI pair of host words. */
8356 lo = TREE_INT_CST_LOW (arg);
8358 /* Clear all the bits that are beyond the type's precision. */
8359 if (width > HOST_BITS_PER_WIDE_INT)
8361 hi = TREE_INT_CST_HIGH (arg);
8362 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8363 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8368 if (width < HOST_BITS_PER_WIDE_INT)
8369 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8372 switch (DECL_FUNCTION_CODE (fndecl))
8374 CASE_INT_FN (BUILT_IN_FFS):
/* lo & -lo isolates the lowest set bit; ffs is 1-based. */
8376 result = exact_log2 (lo & -lo) + 1;
8378 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8383 CASE_INT_FN (BUILT_IN_CLZ):
8385 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8387 result = width - floor_log2 (lo) - 1;
/* A zero argument has target-defined behavior; bail out unless the
   target defines a value for clz(0). */
8388 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8392 CASE_INT_FN (BUILT_IN_CTZ):
8394 result = exact_log2 (lo & -lo);
8396 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi)
8397 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8401 CASE_INT_FN (BUILT_IN_POPCOUNT):
/* Kernighan's trick: x &= x - 1 clears the lowest set bit. */
8404 result++, lo &= lo - 1;
8406 result++, hi &= hi - 1;
8409 CASE_INT_FN (BUILT_IN_PARITY):
8412 result++, lo &= lo - 1;
8414 result++, hi &= hi - 1;
8422 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8428 /* Fold function call to builtin_bswap and the long and long long
8429 variants. Return NULL_TREE if no simplification can be made. */
8431 fold_builtin_bswap (tree fndecl, tree arg)
8433 if (! validate_arg (arg, INTEGER_TYPE))
8436 /* Optimize constant value. */
8437 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8439 HOST_WIDE_INT hi, width, r_hi = 0;
8440 unsigned HOST_WIDE_INT lo, r_lo = 0;
8443 type = TREE_TYPE (arg);
8444 width = TYPE_PRECISION (type);
8445 lo = TREE_INT_CST_LOW (arg);
8446 hi = TREE_INT_CST_HIGH (arg);
8448 switch (DECL_FUNCTION_CODE (fndecl))
8450 case BUILT_IN_BSWAP32:
8451 case BUILT_IN_BSWAP64:
/* Move each byte from source position S to mirrored position D,
   reading/writing the LO or HI host word as appropriate. */
8455 for (s = 0; s < width; s += 8)
8457 int d = width - s - 8;
8458 unsigned HOST_WIDE_INT byte;
8460 if (s < HOST_BITS_PER_WIDE_INT)
8461 byte = (lo >> s) & 0xff;
8463 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8465 if (d < HOST_BITS_PER_WIDE_INT)
8468 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
/* Narrow results fit in a single host word. */
8478 if (width < HOST_BITS_PER_WIDE_INT)
8479 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8481 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8487 /* A subroutine of fold_builtin to fold the various logarithmic
8488 functions. Return NULL_TREE if no simplification can be made.
8489 FUNC is the corresponding MPFR logarithm function. */
8492 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8493 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8495 if (validate_arg (arg, REAL_TYPE))
8497 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8499 const enum built_in_function fcode = builtin_mathfn_code (arg);
8501 /* Calculate the result when the argument is a constant. */
8502 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8505 /* Special case, optimize logN(expN(x)) = x. */
/* FUNC identifies which log this is, so only the matching exp family
   cancels exactly. */
8506 if (flag_unsafe_math_optimizations
8507 && ((func == mpfr_log
8508 && (fcode == BUILT_IN_EXP
8509 || fcode == BUILT_IN_EXPF
8510 || fcode == BUILT_IN_EXPL))
8511 || (func == mpfr_log2
8512 && (fcode == BUILT_IN_EXP2
8513 || fcode == BUILT_IN_EXP2F
8514 || fcode == BUILT_IN_EXP2L))
8515 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8516 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8518 /* Optimize logN(func()) for various exponential functions. We
8519 want to determine the value "x" and the power "exponent" in
8520 order to transform logN(x**exponent) into exponent*logN(x). */
8521 if (flag_unsafe_math_optimizations)
8523 tree exponent = 0, x = 0;
8527 CASE_FLT_FN (BUILT_IN_EXP):
8528 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8529 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8531 exponent = CALL_EXPR_ARG (arg, 0);
8533 CASE_FLT_FN (BUILT_IN_EXP2):
8534 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8535 x = build_real (type, dconst2);
8536 exponent = CALL_EXPR_ARG (arg, 0);
8538 CASE_FLT_FN (BUILT_IN_EXP10):
8539 CASE_FLT_FN (BUILT_IN_POW10):
8540 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8542 REAL_VALUE_TYPE dconst10;
8543 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8544 x = build_real (type, dconst10);
8546 exponent = CALL_EXPR_ARG (arg, 0);
8548 CASE_FLT_FN (BUILT_IN_SQRT):
8549 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8550 x = CALL_EXPR_ARG (arg, 0);
8551 exponent = build_real (type, dconsthalf);
8553 CASE_FLT_FN (BUILT_IN_CBRT):
8554 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8555 x = CALL_EXPR_ARG (arg, 0);
8556 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8559 CASE_FLT_FN (BUILT_IN_POW):
8560 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8561 x = CALL_EXPR_ARG (arg, 0);
8562 exponent = CALL_EXPR_ARG (arg, 1);
8568 /* Now perform the optimization. */
8571 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8572 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8580 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8581 NULL_TREE if no simplification can be made. */
8584 fold_builtin_hypot (location_t loc, tree fndecl,
8585 tree arg0, tree arg1, tree type)
8587 tree res, narg0, narg1;
8589 if (!validate_arg (arg0, REAL_TYPE)
8590 || !validate_arg (arg1, REAL_TYPE))
8593 /* Calculate the result when the argument is a constant. */
8594 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8597 /* If either argument to hypot has a negate or abs, strip that off.
8598 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
/* hypot depends only on magnitudes, so sign operations are irrelevant. */
8599 narg0 = fold_strip_sign_ops (arg0);
8600 narg1 = fold_strip_sign_ops (arg1);
8603 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8604 narg1 ? narg1 : arg1);
8607 /* If either argument is zero, hypot is fabs of the other. */
8608 if (real_zerop (arg0))
8609 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8610 else if (real_zerop (arg1))
8611 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8613 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8614 if (flag_unsafe_math_optimizations
8615 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8617 const REAL_VALUE_TYPE sqrt2_trunc
8618 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8619 return fold_build2_loc (loc, MULT_EXPR, type,
8620 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8621 build_real (type, sqrt2_trunc));
8628 /* Fold a builtin function call to pow, powf, or powl. Return
8629 NULL_TREE if no simplification can be made. */
8631 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8635 if (!validate_arg (arg0, REAL_TYPE)
8636 || !validate_arg (arg1, REAL_TYPE))
8639 /* Calculate the result when the argument is a constant. */
8640 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8643 /* Optimize pow(1.0,y) = 1.0. */
8644 if (real_onep (arg0))
8645 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
/* Constant exponent: handle the special values and integer powers. */
8647 if (TREE_CODE (arg1) == REAL_CST
8648 && !TREE_OVERFLOW (arg1))
8650 REAL_VALUE_TYPE cint;
8654 c = TREE_REAL_CST (arg1);
8656 /* Optimize pow(x,0.0) = 1.0. */
8657 if (REAL_VALUES_EQUAL (c, dconst0))
8658 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8661 /* Optimize pow(x,1.0) = x. */
8662 if (REAL_VALUES_EQUAL (c, dconst1))
8665 /* Optimize pow(x,-1.0) = 1.0/x. */
8666 if (REAL_VALUES_EQUAL (c, dconstm1))
8667 return fold_build2_loc (loc, RDIV_EXPR, type,
8668 build_real (type, dconst1), arg0);
8670 /* Optimize pow(x,0.5) = sqrt(x). */
8671 if (flag_unsafe_math_optimizations
8672 && REAL_VALUES_EQUAL (c, dconsthalf))
8674 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8676 if (sqrtfn != NULL_TREE)
8677 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8680 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8681 if (flag_unsafe_math_optimizations)
8683 const REAL_VALUE_TYPE dconstroot
8684 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8686 if (REAL_VALUES_EQUAL (c, dconstroot))
8688 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8689 if (cbrtfn != NULL_TREE)
8690 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8694 /* Check for an integer exponent. */
/* Round-trip C through an integer; if unchanged, C is exactly integral. */
8695 n = real_to_integer (&c);
8696 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8697 if (real_identical (&c, &cint))
8699 /* Attempt to evaluate pow at compile-time, unless this should
8700 raise an exception. */
8701 if (TREE_CODE (arg0) == REAL_CST
8702 && !TREE_OVERFLOW (arg0)
8704 || (!flag_trapping_math && !flag_errno_math)
8705 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8710 x = TREE_REAL_CST (arg0);
/* real_powi reports whether the result was rounded; only keep an
   inexact result under -funsafe-math-optimizations. */
8711 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8712 if (flag_unsafe_math_optimizations || !inexact)
8713 return build_real (type, x);
8716 /* Strip sign ops from even integer powers. */
8717 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8719 tree narg0 = fold_strip_sign_ops (arg0);
8721 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
/* Compositions with other math calls as the base. */
8726 if (flag_unsafe_math_optimizations)
8728 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8730 /* Optimize pow(expN(x),y) = expN(x*y). */
8731 if (BUILTIN_EXPONENT_P (fcode))
8733 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8734 tree arg = CALL_EXPR_ARG (arg0, 0);
8735 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8736 return build_call_expr_loc (loc, expfn, 1, arg);
8739 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8740 if (BUILTIN_SQRT_P (fcode))
8742 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8743 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8744 build_real (type, dconsthalf));
8745 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8748 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8749 if (BUILTIN_CBRT_P (fcode))
8751 tree arg = CALL_EXPR_ARG (arg0, 0);
8752 if (tree_expr_nonnegative_p (arg))
8754 const REAL_VALUE_TYPE dconstroot
8755 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8756 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8757 build_real (type, dconstroot));
8758 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8762 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8763 if (fcode == BUILT_IN_POW
8764 || fcode == BUILT_IN_POWF
8765 || fcode == BUILT_IN_POWL)
8767 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8768 if (tree_expr_nonnegative_p (arg00))
8770 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8771 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8772 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8780 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8781 Return NULL_TREE if no simplification can be made. */
8783 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8784 tree arg0, tree arg1, tree type)
8786 if (!validate_arg (arg0, REAL_TYPE)
8787 || !validate_arg (arg1, INTEGER_TYPE))
8790 /* Optimize pow(1.0,y) = 1.0. */
8791 if (real_onep (arg0))
8792 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
/* Exponent fits a signed host integer: handle it exactly. */
8794 if (host_integerp (arg1, 0))
8796 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8798 /* Evaluate powi at compile-time. */
8799 if (TREE_CODE (arg0) == REAL_CST
8800 && !TREE_OVERFLOW (arg0))
8803 x = TREE_REAL_CST (arg0);
8804 real_powi (&x, TYPE_MODE (type), &x, c);
8805 return build_real (type, x);
8808 /* Optimize pow(x,0) = 1.0. */
8810 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8813 /* Optimize pow(x,1) = x. */
8817 /* Optimize pow(x,-1) = 1.0/x. */
8819 return fold_build2_loc (loc, RDIV_EXPR, type,
8820 build_real (type, dconst1), arg0);
8826 /* A subroutine of fold_builtin to fold the various exponent
8827 functions. Return NULL_TREE if no simplification can be made.
8828 FUNC is the corresponding MPFR exponent function. */
8831 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8832 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8834 if (validate_arg (arg, REAL_TYPE))
8836 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8839 /* Calculate the result when the argument is a constant. */
8840 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8843 /* Optimize expN(logN(x)) = x. */
/* FUNC identifies which exp this is, so only the matching log family
   cancels exactly; mirror image of fold_builtin_logarithm. */
8844 if (flag_unsafe_math_optimizations)
8846 const enum built_in_function fcode = builtin_mathfn_code (arg);
8848 if ((func == mpfr_exp
8849 && (fcode == BUILT_IN_LOG
8850 || fcode == BUILT_IN_LOGF
8851 || fcode == BUILT_IN_LOGL))
8852 || (func == mpfr_exp2
8853 && (fcode == BUILT_IN_LOG2
8854 || fcode == BUILT_IN_LOG2F
8855 || fcode == BUILT_IN_LOG2L))
8856 || (func == mpfr_exp10
8857 && (fcode == BUILT_IN_LOG10
8858 || fcode == BUILT_IN_LOG10F
8859 || fcode == BUILT_IN_LOG10L)))
8860 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8867 /* Return true if VAR is a VAR_DECL or a component thereof. */
8870 var_decl_component_p (tree var)
/* Strip COMPONENT_REF/ARRAY_REF/etc. wrappers down to the base object. */
8873 while (handled_component_p (inner))
8874 inner = TREE_OPERAND (inner, 0);
8875 return SSA_VAR_P (inner);
8878 /* Fold function call to builtin memset. Return
8879 NULL_TREE if no simplification can be made. */
8882 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8883 tree type, bool ignore)
8885 tree var, ret, etype;
8886 unsigned HOST_WIDE_INT length, cval;
8888 if (! validate_arg (dest, POINTER_TYPE)
8889 || ! validate_arg (c, INTEGER_TYPE)
8890 || ! validate_arg (len, INTEGER_TYPE))
8893 if (! host_integerp (len, 1))
8896 /* If the LEN parameter is zero, return DEST. */
8897 if (integer_zerop (len))
8898 return omit_one_operand_loc (loc, type, dest, c)
8900 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
/* Only &object destinations can be turned into a direct store. */
8905 if (TREE_CODE (var) != ADDR_EXPR)
8908 var = TREE_OPERAND (var, 0);
8909 if (TREE_THIS_VOLATILE (var))
8912 etype = TREE_TYPE (var);
8913 if (TREE_CODE (etype) == ARRAY_TYPE)
8914 etype = TREE_TYPE (etype);
8916 if (!INTEGRAL_TYPE_P (etype)
8917 && !POINTER_TYPE_P (etype))
8920 if (! var_decl_component_p (var))
/* The store must cover the whole object, be sufficiently aligned, and
   fit in one host word. */
8923 length = tree_low_cst (len, 1);
8924 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8925 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8929 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8932 if (integer_zerop (c))
8936 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
/* Replicate the byte value across the word; the intermediate widening
   steps are elided in this listing — the (cval << 31) << 1 form avoids
   an undefined 64-bit shift on 32-bit-int hosts. */
8939 cval = tree_low_cst (c, 1);
8943 cval |= (cval << 31) << 1;
8946 ret = build_int_cst_type (etype, cval);
8947 var = build_fold_indirect_ref_loc (loc,
8948 fold_convert_loc (loc,
8949 build_pointer_type (etype),
8951 ret = build2 (MODIFY_EXPR, etype, var, ret);
8955 return omit_one_operand_loc (loc, type, dest, ret);
8958 /* Fold function call to builtin bzero. Return
8959 NULL_TREE if no simplification can be made. */
8962 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8964 if (! validate_arg (dest, POINTER_TYPE)
8965 || ! validate_arg (size, INTEGER_TYPE))
8971 /* New argument list transforming bzero(ptr x, int y) to
8972 memset(ptr x, int 0, size_t y). This is done this way
8973 so that if it isn't expanded inline, we fallback to
8974 calling bzero instead of memset. */
8976 return fold_builtin_memset (loc, dest, integer_zero_node,
8977 fold_convert_loc (loc, sizetype, size),
8978 void_type_node, ignore);
8981 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8982 NULL_TREE if no simplification can be made.
8983 If ENDP is 0, return DEST (like memcpy).
8984 If ENDP is 1, return DEST+LEN (like mempcpy).
8985 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8986 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8990 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8991 tree len, tree type, bool ignore, int endp)
8993 tree destvar, srcvar, expr;
8995 if (! validate_arg (dest, POINTER_TYPE)
8996 || ! validate_arg (src, POINTER_TYPE)
8997 || ! validate_arg (len, INTEGER_TYPE))
9000 /* If the LEN parameter is zero, return DEST. */
9001 if (integer_zerop (len))
9002 return omit_one_operand_loc (loc, type, dest, src);
9004 /* If SRC and DEST are the same (and not volatile), return
9005 DEST{,+LEN,+LEN-1}. */
9006 if (operand_equal_p (src, dest, 0))
9010 tree srctype, desttype;
9011 int src_align, dest_align;
9015 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
9016 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
9018 /* Both DEST and SRC must be pointer types.
9019 ??? This is what old code did. Is the testing for pointer types
9022 If either SRC is readonly or length is 1, we can use memcpy. */
9023 if (!dest_align || !src_align)
9025 if (readonly_data_expr (src)
9026 || (host_integerp (len, 1)
9027 && (MIN (src_align, dest_align) / BITS_PER_UNIT
9028 >= tree_low_cst (len, 1))))
9030 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9033 return build_call_expr_loc (loc, fn, 3, dest, src, len);
9036 /* If *src and *dest can't overlap, optimize into memcpy as well. */
9037 srcvar = build_fold_indirect_ref_loc (loc, src);
9038 destvar = build_fold_indirect_ref_loc (loc, dest);
9040 && !TREE_THIS_VOLATILE (srcvar)
9042 && !TREE_THIS_VOLATILE (destvar))
9044 tree src_base, dest_base, fn;
9045 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
9046 HOST_WIDE_INT size = -1;
9047 HOST_WIDE_INT maxsize = -1;
9050 if (handled_component_p (src_base))
9051 src_base = get_ref_base_and_extent (src_base, &src_offset,
9053 dest_base = destvar;
9054 if (handled_component_p (dest_base))
9055 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
9057 if (host_integerp (len, 1))
9059 maxsize = tree_low_cst (len, 1);
9061 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
9064 maxsize *= BITS_PER_UNIT;
9068 if (SSA_VAR_P (src_base)
9069 && SSA_VAR_P (dest_base))
9071 if (operand_equal_p (src_base, dest_base, 0)
9072 && ranges_overlap_p (src_offset, maxsize,
9073 dest_offset, maxsize))
9076 else if (TREE_CODE (src_base) == INDIRECT_REF
9077 && TREE_CODE (dest_base) == INDIRECT_REF)
9079 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
9080 TREE_OPERAND (dest_base, 0), 0)
9081 || ranges_overlap_p (src_offset, maxsize,
9082 dest_offset, maxsize))
9088 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9091 return build_call_expr_loc (loc, fn, 3, dest, src, len);
9096 if (!host_integerp (len, 0))
9099 This logic lose for arguments like (type *)malloc (sizeof (type)),
9100 since we strip the casts of up to VOID return value from malloc.
9101 Perhaps we ought to inherit type from non-VOID argument here? */
9104 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
9105 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
9107 tree tem = TREE_OPERAND (src, 0);
9109 if (tem != TREE_OPERAND (src, 0))
9110 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
9112 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
9114 tree tem = TREE_OPERAND (dest, 0);
9116 if (tem != TREE_OPERAND (dest, 0))
9117 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
9119 srctype = TREE_TYPE (TREE_TYPE (src));
9121 && TREE_CODE (srctype) == ARRAY_TYPE
9122 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9124 srctype = TREE_TYPE (srctype);
9126 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
9128 desttype = TREE_TYPE (TREE_TYPE (dest));
9130 && TREE_CODE (desttype) == ARRAY_TYPE
9131 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9133 desttype = TREE_TYPE (desttype);
9135 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
9137 if (!srctype || !desttype
9138 || !TYPE_SIZE_UNIT (srctype)
9139 || !TYPE_SIZE_UNIT (desttype)
9140 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
9141 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
9142 || TYPE_VOLATILE (srctype)
9143 || TYPE_VOLATILE (desttype))
9146 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
9147 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
9148 if (dest_align < (int) TYPE_ALIGN (desttype)
9149 || src_align < (int) TYPE_ALIGN (srctype))
9153 dest = builtin_save_expr (dest);
9156 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9158 srcvar = build_fold_indirect_ref_loc (loc, src);
9159 if (TREE_THIS_VOLATILE (srcvar))
9161 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
9163 /* With memcpy, it is possible to bypass aliasing rules, so without
9164 this check i.e. execute/20060930-2.c would be misoptimized,
9165 because it use conflicting alias set to hold argument for the
9166 memcpy call. This check is probably unnecessary with
9167 -fno-strict-aliasing. Similarly for destvar. See also
9169 else if (!var_decl_component_p (srcvar))
9173 destvar = NULL_TREE;
9174 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9176 destvar = build_fold_indirect_ref_loc (loc, dest);
9177 if (TREE_THIS_VOLATILE (destvar))
9179 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
9180 destvar = NULL_TREE;
9181 else if (!var_decl_component_p (destvar))
9182 destvar = NULL_TREE;
9185 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9188 if (srcvar == NULL_TREE)
9191 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
9194 srctype = build_qualified_type (desttype, 0);
9195 if (src_align < (int) TYPE_ALIGN (srctype))
9197 if (AGGREGATE_TYPE_P (srctype)
9198 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9201 srctype = build_variant_type_copy (srctype);
9202 TYPE_ALIGN (srctype) = src_align;
9203 TYPE_USER_ALIGN (srctype) = 1;
9204 TYPE_PACKED (srctype) = 1;
9206 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9207 src = fold_convert_loc (loc, srcptype, src);
9208 srcvar = build_fold_indirect_ref_loc (loc, src);
9210 else if (destvar == NULL_TREE)
9213 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9216 desttype = build_qualified_type (srctype, 0);
9217 if (dest_align < (int) TYPE_ALIGN (desttype))
9219 if (AGGREGATE_TYPE_P (desttype)
9220 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9223 desttype = build_variant_type_copy (desttype);
9224 TYPE_ALIGN (desttype) = dest_align;
9225 TYPE_USER_ALIGN (desttype) = 1;
9226 TYPE_PACKED (desttype) = 1;
9228 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9229 dest = fold_convert_loc (loc, destptype, dest);
9230 destvar = build_fold_indirect_ref_loc (loc, dest);
9233 if (srctype == desttype
9234 || (gimple_in_ssa_p (cfun)
9235 && useless_type_conversion_p (desttype, srctype)))
9237 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9238 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9239 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9240 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9241 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
9243 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
9244 TREE_TYPE (destvar), srcvar);
9245 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9251 if (endp == 0 || endp == 3)
9252 return omit_one_operand_loc (loc, type, dest, expr);
9258 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9261 len = fold_convert_loc (loc, sizetype, len);
9262 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9263 dest = fold_convert_loc (loc, type, dest);
9265 dest = omit_one_operand_loc (loc, type, dest, expr);
9269 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9270 If LEN is not NULL, it represents the length of the string to be
9271 copied. Return NULL_TREE if no simplification can be made. */
/* NOTE(review): this listing elides some original lines (the embedded line
   numbers are non-contiguous), so guard-failure returns, braces and some
   declarations are not visible here.  The same holds for the blocks below.  */
9274 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9278 if (!validate_arg (dest, POINTER_TYPE)
9279 || !validate_arg (src, POINTER_TYPE))
9282 /* If SRC and DEST are the same (and not volatile), return DEST. */
9283 if (operand_equal_p (src, dest, 0))
9284 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* Skip the strcpy -> memcpy transformation when optimizing for size.  */
9286 if (optimize_function_for_size_p (cfun))
/* strcpy (D, S) -> memcpy (D, S, strlen (S) + 1), but only when the
   source length folds to a side-effect-free constant.  */
9289 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9295 len = c_strlen (src, 1);
9296 if (! len || TREE_SIDE_EFFECTS (len))
/* Add one for the terminating NUL byte.  */
9300 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
9301 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9302 build_call_expr_loc (loc, fn, 3, dest, src, len));
9305 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9306 If SLEN is not NULL, it represents the length of the source string.
9307 Return NULL_TREE if no simplification can be made. */
9310 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9311 tree src, tree len, tree slen)
9315 if (!validate_arg (dest, POINTER_TYPE)
9316 || !validate_arg (src, POINTER_TYPE)
9317 || !validate_arg (len, INTEGER_TYPE))
9320 /* If the LEN parameter is zero, return DEST. */
9321 if (integer_zerop (len))
9322 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9324 /* We can't compare slen with len as constants below if len is not a
   constant.  */
9326 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
/* Compile-time length of the source string, if known.  */
9330 slen = c_strlen (src, 1);
9332 /* Now, we must be passed a constant src ptr parameter. */
9333 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* Include the terminating NUL in the source length.  */
9336 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9338 /* We do not support simplification of this case, though we do
9339 support it when expanding trees into RTL. */
9340 /* FIXME: generate a call to __builtin_memset. */
9341 if (tree_int_cst_lt (slen, len))
9344 /* OK transform into builtin memcpy. */
9345 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9348 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9349 build_call_expr_loc (loc, fn, 3, dest, src, len));
9352 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9353 arguments to the call, and TYPE is its return type.
9354 Return NULL_TREE if no simplification can be made. */
9357 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9359 if (!validate_arg (arg1, POINTER_TYPE)
9360 || !validate_arg (arg2, INTEGER_TYPE)
9361 || !validate_arg (len, INTEGER_TYPE))
/* Only fold when the character and the length are compile-time constants.  */
9367 if (TREE_CODE (arg2) != INTEGER_CST
9368 || !host_integerp (len, 1))
9371 p1 = c_getstr (arg1);
/* Require LEN to be within the string (plus its NUL) so the host memchr
   below never reads past the literal.  */
9372 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
/* Convert the character to the target character set first.  */
9378 if (target_char_cast (arg2, &c))
9381 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
/* Presumably the not-found case (guard elided in this listing):
   fold to a null pointer constant.  */
9384 return build_int_cst (TREE_TYPE (arg1), 0);
9386 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9388 return fold_convert_loc (loc, type, tem);
9394 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9395 Return NULL_TREE if no simplification can be made. */
9398 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9400 const char *p1, *p2;
9402 if (!validate_arg (arg1, POINTER_TYPE)
9403 || !validate_arg (arg2, POINTER_TYPE)
9404 || !validate_arg (len, INTEGER_TYPE))
9407 /* If the LEN parameter is zero, return zero. */
9408 if (integer_zerop (len))
9409 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9412 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9413 if (operand_equal_p (arg1, arg2, 0))
9414 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9416 p1 = c_getstr (arg1);
9417 p2 = c_getstr (arg2);
9419 /* If all arguments are constant, and the value of len is not greater
9420 than the lengths of arg1 and arg2, evaluate at compile-time. */
9421 if (host_integerp (len, 1) && p1 && p2
9422 && compare_tree_int (len, strlen (p1) + 1) <= 0
9423 && compare_tree_int (len, strlen (p2) + 1) <= 0)
/* Evaluate on the host and normalize the sign to -1/0/1 (the
   comparison lines are elided in this listing).  */
9425 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9428 return integer_one_node;
9430 return integer_minus_one_node;
9432 return integer_zero_node;
9435 /* If len parameter is one, return an expression corresponding to
9436 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9437 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9439 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9440 tree cst_uchar_ptr_node
9441 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9444 = fold_convert_loc (loc, integer_type_node,
9445 build1 (INDIRECT_REF, cst_uchar_node,
9446 fold_convert_loc (loc,
9450 = fold_convert_loc (loc, integer_type_node,
9451 build1 (INDIRECT_REF, cst_uchar_node,
9452 fold_convert_loc (loc,
9455 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9461 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9462 Return NULL_TREE if no simplification can be made. */
9465 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9467 const char *p1, *p2;
9469 if (!validate_arg (arg1, POINTER_TYPE)
9470 || !validate_arg (arg2, POINTER_TYPE))
9473 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9474 if (operand_equal_p (arg1, arg2, 0))
9475 return integer_zero_node;
9477 p1 = c_getstr (arg1);
9478 p2 = c_getstr (arg2);
/* Both strings constant: evaluate on the host and normalize the sign
   to -1/0/1 (comparison lines elided in this listing).  */
9482 const int i = strcmp (p1, p2);
9484 return integer_minus_one_node;
9486 return integer_one_node;
9488 return integer_zero_node;
9491 /* If the second arg is "", return *(const unsigned char*)arg1. */
9492 if (p2 && *p2 == '\0')
9494 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9495 tree cst_uchar_ptr_node
9496 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9498 return fold_convert_loc (loc, integer_type_node,
9499 build1 (INDIRECT_REF, cst_uchar_node,
9500 fold_convert_loc (loc,
9505 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9506 if (p1 && *p1 == '\0')
9508 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9509 tree cst_uchar_ptr_node
9510 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9513 = fold_convert_loc (loc, integer_type_node,
9514 build1 (INDIRECT_REF, cst_uchar_node,
9515 fold_convert_loc (loc,
9518 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9524 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9525 Return NULL_TREE if no simplification can be made. */
9528 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9530 const char *p1, *p2;
9532 if (!validate_arg (arg1, POINTER_TYPE)
9533 || !validate_arg (arg2, POINTER_TYPE)
9534 || !validate_arg (len, INTEGER_TYPE))
9537 /* If the LEN parameter is zero, return zero. */
9538 if (integer_zerop (len))
9539 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9542 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9543 if (operand_equal_p (arg1, arg2, 0))
9544 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9546 p1 = c_getstr (arg1);
9547 p2 = c_getstr (arg2);
/* Both strings and the length constant: evaluate on the host and
   normalize the sign to -1/0/1 (comparison lines elided here).  */
9549 if (host_integerp (len, 1) && p1 && p2)
9551 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9553 return integer_one_node;
9555 return integer_minus_one_node;
9557 return integer_zero_node;
9560 /* If the second arg is "", and the length is greater than zero,
9561 return *(const unsigned char*)arg1. */
9562 if (p2 && *p2 == '\0'
9563 && TREE_CODE (len) == INTEGER_CST
9564 && tree_int_cst_sgn (len) == 1)
9566 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9567 tree cst_uchar_ptr_node
9568 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9570 return fold_convert_loc (loc, integer_type_node,
9571 build1 (INDIRECT_REF, cst_uchar_node,
9572 fold_convert_loc (loc,
9577 /* If the first arg is "", and the length is greater than zero,
9578 return -*(const unsigned char*)arg2. */
9579 if (p1 && *p1 == '\0'
9580 && TREE_CODE (len) == INTEGER_CST
9581 && tree_int_cst_sgn (len) == 1)
9583 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9584 tree cst_uchar_ptr_node
9585 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9587 tree temp = fold_convert_loc (loc, integer_type_node,
9588 build1 (INDIRECT_REF, cst_uchar_node,
9589 fold_convert_loc (loc,
9592 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9595 /* If len parameter is one, return an expression corresponding to
9596 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9597 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9599 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9600 tree cst_uchar_ptr_node
9601 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9603 tree ind1 = fold_convert_loc (loc, integer_type_node,
9604 build1 (INDIRECT_REF, cst_uchar_node,
9605 fold_convert_loc (loc,
9608 tree ind2 = fold_convert_loc (loc, integer_type_node,
9609 build1 (INDIRECT_REF, cst_uchar_node,
9610 fold_convert_loc (loc,
9613 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9619 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9620 ARG. Return NULL_TREE if no simplification can be made. */
9623 fold_builtin_signbit (location_t loc, tree arg, tree type)
9627 if (!validate_arg (arg, REAL_TYPE))
9630 /* If ARG is a compile-time constant, determine the result. */
9631 if (TREE_CODE (arg) == REAL_CST
9632 && !TREE_OVERFLOW (arg))
9636 c = TREE_REAL_CST (arg);
/* The result is 1 when the sign bit is set, otherwise 0.  */
9637 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9638 return fold_convert_loc (loc, type, temp);
9641 /* If ARG is non-negative, the result is always zero. */
9642 if (tree_expr_nonnegative_p (arg))
9643 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9645 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9646 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9647 return fold_build2_loc (loc, LT_EXPR, type, arg,
9648 build_real (TREE_TYPE (arg), dconst0));
9653 /* Fold function call to builtin copysign, copysignf or copysignl with
9654 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
   be made.  */
9658 fold_builtin_copysign (location_t loc, tree fndecl,
9659 tree arg1, tree arg2, tree type)
9663 if (!validate_arg (arg1, REAL_TYPE)
9664 || !validate_arg (arg2, REAL_TYPE))
9667 /* copysign(X,X) is X. */
9668 if (operand_equal_p (arg1, arg2, 0))
9669 return fold_convert_loc (loc, type, arg1);
9671 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9672 if (TREE_CODE (arg1) == REAL_CST
9673 && TREE_CODE (arg2) == REAL_CST
9674 && !TREE_OVERFLOW (arg1)
9675 && !TREE_OVERFLOW (arg2))
9677 REAL_VALUE_TYPE c1, c2;
9679 c1 = TREE_REAL_CST (arg1);
9680 c2 = TREE_REAL_CST (arg2);
9681 /* c1.sign := c2.sign. */
9682 real_copysign (&c1, &c2);
9683 return build_real (type, c1);
9686 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9687 Remember to evaluate Y for side-effects. */
9688 if (tree_expr_nonnegative_p (arg2))
9689 return omit_one_operand_loc (loc, type,
9690 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9693 /* Strip sign changing operations for the first argument. */
/* The sign of ARG1 is irrelevant to copysign, so e.g. a negation on it
   can be dropped and the call rebuilt with the stripped operand.  */
9694 tem = fold_strip_sign_ops (arg1);
9696 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9701 /* Fold a call to builtin isascii with argument ARG. */
9704 fold_builtin_isascii (location_t loc, tree arg)
9706 if (!validate_arg (arg, INTEGER_TYPE))
9710 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
/* ~0x7f masks off the low seven bits; any remaining set bit means the
   value is outside the ASCII range.  */
9711 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9712 build_int_cst (NULL_TREE,
9713 ~ (unsigned HOST_WIDE_INT) 0x7f));
9714 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9715 arg, integer_zero_node);
9719 /* Fold a call to builtin toascii with argument ARG. */
9722 fold_builtin_toascii (location_t loc, tree arg)
9724 if (!validate_arg (arg, INTEGER_TYPE))
9727 /* Transform toascii(c) -> (c & 0x7f). */
/* This matches the traditional definition: keep only the low 7 bits.  */
9728 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9729 build_int_cst (NULL_TREE, 0x7f));
9732 /* Fold a call to builtin isdigit with argument ARG. */
9735 fold_builtin_isdigit (location_t loc, tree arg)
9737 if (!validate_arg (arg, INTEGER_TYPE))
9741 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9742 /* According to the C standard, isdigit is unaffected by locale.
9743 However, it definitely is affected by the target character set. */
9744 unsigned HOST_WIDE_INT target_digit0
9745 = lang_hooks.to_target_charset ('0');
/* Bail out when the target charset mapping for '0' is unavailable.  */
9747 if (target_digit0 == 0)
/* The unsigned subtraction makes values below '0' wrap to large
   numbers, so a single <= 9 comparison covers both bounds.  */
9750 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9751 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9752 build_int_cst (unsigned_type_node, target_digit0));
9753 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9754 build_int_cst (unsigned_type_node, 9));
9758 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9761 fold_builtin_fabs (location_t loc, tree arg, tree type)
9763 if (!validate_arg (arg, REAL_TYPE))
9766 arg = fold_convert_loc (loc, type, arg);
/* Constant-fold when possible, otherwise emit an ABS_EXPR.  */
9767 if (TREE_CODE (arg) == REAL_CST)
9768 return fold_abs_const (arg, type);
9769 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9772 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9775 fold_builtin_abs (location_t loc, tree arg, tree type)
9777 if (!validate_arg (arg, INTEGER_TYPE))
9780 arg = fold_convert_loc (loc, type, arg);
/* Constant-fold when possible, otherwise emit an ABS_EXPR.  */
9781 if (TREE_CODE (arg) == INTEGER_CST)
9782 return fold_abs_const (arg, type);
9783 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9786 /* Fold a call to builtin fmin or fmax. */
/* MAX selects between fmax (true) and fmin (false) behavior.  */
9789 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9790 tree type, bool max)
9792 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9794 /* Calculate the result when the argument is a constant. */
9795 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9800 /* If either argument is NaN, return the other one. Avoid the
9801 transformation if we get (and honor) a signalling NaN. Using
9802 omit_one_operand() ensures we create a non-lvalue. */
9803 if (TREE_CODE (arg0) == REAL_CST
9804 && real_isnan (&TREE_REAL_CST (arg0))
9805 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9806 || ! TREE_REAL_CST (arg0).signalling))
9807 return omit_one_operand_loc (loc, type, arg1, arg0);
9808 if (TREE_CODE (arg1) == REAL_CST
9809 && real_isnan (&TREE_REAL_CST (arg1))
9810 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9811 || ! TREE_REAL_CST (arg1).signalling))
9812 return omit_one_operand_loc (loc, type, arg0, arg1);
9814 /* Transform fmin/fmax(x,x) -> x. */
9815 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9816 return omit_one_operand_loc (loc, type, arg0, arg1);
9818 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9819 functions to return the numeric arg if the other one is NaN.
9820 These tree codes don't honor that, so only transform if
9821 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9822 handled, so we don't have to worry about it either. */
9823 if (flag_finite_math_only)
9824 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9825 fold_convert_loc (loc, type, arg0),
9826 fold_convert_loc (loc, type, arg1));
9831 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9834 fold_builtin_carg (location_t loc, tree arg, tree type)
9836 if (validate_arg (arg, COMPLEX_TYPE)
9837 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9839 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
/* Save the argument so its real and imaginary parts are evaluated
   exactly once.  */
9843 tree new_arg = builtin_save_expr (arg);
9844 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9845 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
/* carg(a+bi) == atan2(b, a): imaginary part first.  */
9846 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9853 /* Fold a call to builtin logb/ilogb. */
/* RETTYPE distinguishes the two: REAL_TYPE for logb, integral for ilogb.
   NOTE(review): the switch over the value's classification is partly
   elided in this listing.  */
9856 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9858 if (! validate_arg (arg, REAL_TYPE))
9863 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9865 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9871 /* If arg is Inf or NaN and we're logb, return it. */
9872 if (TREE_CODE (rettype) == REAL_TYPE)
9873 return fold_convert_loc (loc, rettype, arg);
9874 /* Fall through... */
9876 /* Zero may set errno and/or raise an exception for logb, also
9877 for ilogb we don't know FP_ILOGB0. */
9880 /* For normal numbers, proceed iff radix == 2. In GCC,
9881 normalized significands are in the range [0.5, 1.0). We
9882 want the exponent as if they were [1.0, 2.0) so get the
9883 exponent and subtract 1. */
9884 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9885 return fold_convert_loc (loc, rettype,
9886 build_int_cst (NULL_TREE,
9887 REAL_EXP (value)-1));
9895 /* Fold a call to builtin significand, if radix == 2. */
9898 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9900 if (! validate_arg (arg, REAL_TYPE))
9905 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9907 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9914 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9915 return fold_convert_loc (loc, rettype, arg);
9917 /* For normal numbers, proceed iff radix == 2. */
9918 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9920 REAL_VALUE_TYPE result = *value;
9921 /* In GCC, normalized significands are in the range [0.5,
9922 1.0). We want them to be [1.0, 2.0) so set the
   exponent to 1.  */
9924 SET_REAL_EXP (&result, 1);
9925 return build_real (rettype, result);
9934 /* Fold a call to builtin frexp, we can assume the base is 2. */
/* ARG0 is the value, ARG1 the int* out-parameter for the exponent.  */
9937 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9939 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9944 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9947 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9949 /* Proceed if a valid pointer type was passed in. */
9950 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9952 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9958 /* For +-0, return (*exp = 0, +-0). */
9959 exp = integer_zero_node;
9964 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9965 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9968 /* Since the frexp function always expects base 2, and in
9969 GCC normalized significands are already in the range
9970 [0.5, 1.0), we have exactly what frexp wants. */
9971 REAL_VALUE_TYPE frac_rvt = *value;
9972 SET_REAL_EXP (&frac_rvt, 0);
9973 frac = build_real (rettype, frac_rvt);
9974 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9981 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9982 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9983 TREE_SIDE_EFFECTS (arg1) = 1;
9984 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9990 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9991 then we can assume the base is two. If it's false, then we have to
9992 check the mode of the TYPE parameter in certain cases. */
9995 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9996 tree type, bool ldexp)
9998 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
10003 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
10004 if (real_zerop (arg0) || integer_zerop (arg1)
10005 || (TREE_CODE (arg0) == REAL_CST
10006 && !real_isfinite (&TREE_REAL_CST (arg0))))
10007 return omit_one_operand_loc (loc, type, arg0, arg1);
10009 /* If both arguments are constant, then try to evaluate it. */
/* For scalbn/scalbln (LDEXP false) only fold when the type's radix
   is 2, so the exponent scaling matches the builtin's semantics.  */
10010 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
10011 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10012 && host_integerp (arg1, 0))
10014 /* Bound the maximum adjustment to twice the range of the
10015 mode's valid exponents. Use abs to ensure the range is
10016 positive as a sanity check. */
10017 const long max_exp_adj = 2 *
10018 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
10019 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
10021 /* Get the user-requested adjustment. */
10022 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
10024 /* The requested adjustment must be inside this range. This
10025 is a preliminary cap to avoid things like overflow, we
10026 may still fail to compute the result for other reasons. */
10027 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
10029 REAL_VALUE_TYPE initial_result;
10031 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
10033 /* Ensure we didn't overflow. */
10034 if (! real_isinf (&initial_result))
10036 const REAL_VALUE_TYPE trunc_result
10037 = real_value_truncate (TYPE_MODE (type), initial_result);
10039 /* Only proceed if the target mode can hold the
10040 resulting value. */
10041 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
10042 return build_real (type, trunc_result);
10051 /* Fold a call to builtin modf. */
/* ARG0 is the value, ARG1 the pointer out-parameter for the integral
   part; RETTYPE is the real return type.  */
10054 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
10056 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
10061 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
10064 arg1 = build_fold_indirect_ref_loc (loc, arg1);
10066 /* Proceed if a valid pointer type was passed in. */
10067 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
10069 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10070 REAL_VALUE_TYPE trunc, frac;
10076 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
10077 trunc = frac = *value;
10080 /* For +-Inf, return (*arg1 = arg0, +-0). */
10082 frac.sign = value->sign;
10086 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
10087 real_trunc (&trunc, VOIDmode, value);
10088 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
10089 /* If the original number was negative and already
10090 integral, then the fractional part is -0.0. */
10091 if (value->sign && frac.cl == rvc_zero)
10092 frac.sign = value->sign;
10096 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
10097 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
10098 build_real (rettype, trunc));
10099 TREE_SIDE_EFFECTS (arg1) = 1;
10100 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
10101 build_real (rettype, frac));
10107 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10108 ARG is the argument for the call. */
/* BUILTIN_INDEX selects which classification builtin to fold.  */
10111 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10113 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10116 if (!validate_arg (arg, REAL_TYPE))
10119 switch (builtin_index)
10121 case BUILT_IN_ISINF:
/* Without infinities in the mode, isinf is always 0.  */
10122 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10123 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10125 if (TREE_CODE (arg) == REAL_CST)
10127 r = TREE_REAL_CST (arg);
10128 if (real_isinf (&r))
10129 return real_compare (GT_EXPR, &r, &dconst0)
10130 ? integer_one_node : integer_minus_one_node;
10132 return integer_zero_node;
10137 case BUILT_IN_ISINF_SIGN:
10139 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10140 /* In a boolean context, GCC will fold the inner COND_EXPR to
10141 1. So e.g. "if (isinf_sign(x))" would be folded to just
10142 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10143 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10144 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
10145 tree tmp = NULL_TREE;
/* Save ARG so the two calls below evaluate it only once.  */
10147 arg = builtin_save_expr (arg);
10149 if (signbit_fn && isinf_fn)
10151 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10152 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10154 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10155 signbit_call, integer_zero_node);
10156 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10157 isinf_call, integer_zero_node);
10159 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10160 integer_minus_one_node, integer_one_node);
10161 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10163 integer_zero_node);
10169 case BUILT_IN_ISFINITE:
/* If the mode has neither NaNs nor infinities, everything is finite.  */
10170 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10171 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10172 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10174 if (TREE_CODE (arg) == REAL_CST)
10176 r = TREE_REAL_CST (arg);
10177 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10182 case BUILT_IN_ISNAN:
10183 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10184 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10186 if (TREE_CODE (arg) == REAL_CST)
10188 r = TREE_REAL_CST (arg);
10189 return real_isnan (&r) ? integer_one_node : integer_zero_node;
/* isnan(x) is equivalent to x UNORDERED x.  */
10192 arg = builtin_save_expr (arg);
10193 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10196 gcc_unreachable ();
10200 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10201 This builtin will generate code to return the appropriate floating
10202 point classification depending on the value of the floating point
10203 number passed in. The possible return values must be supplied as
10204 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10205 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10206 one floating point argument which is "type generic". */
10209 fold_builtin_fpclassify (location_t loc, tree exp)
10211 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10212 arg, type, res, tmp;
10213 enum machine_mode mode;
10217 /* Verify the required arguments in the original call. */
10218 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10219 INTEGER_TYPE, INTEGER_TYPE,
10220 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10223 fp_nan = CALL_EXPR_ARG (exp, 0);
10224 fp_infinite = CALL_EXPR_ARG (exp, 1);
10225 fp_normal = CALL_EXPR_ARG (exp, 2);
10226 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10227 fp_zero = CALL_EXPR_ARG (exp, 4);
10228 arg = CALL_EXPR_ARG (exp, 5);
10229 type = TREE_TYPE (arg);
10230 mode = TYPE_MODE (type);
/* Work on |arg| throughout, saved so it is evaluated only once.  */
10231 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10233 /* fpclassify(x) ->
10234 isnan(x) ? FP_NAN :
10235 (fabs(x) == Inf ? FP_INFINITE :
10236 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10237 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
/* Built innermost-first: start with the zero/subnormal decision and
   wrap the outer tests around it below.  */
10239 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10240 build_real (type, dconst0));
10241 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10242 tmp, fp_zero, fp_subnormal);
/* 0x1p(emin-1) is the smallest normal number for this mode.  */
10244 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10245 real_from_string (&r, buf);
10246 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10247 arg, build_real (type, r));
10248 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10250 if (HONOR_INFINITIES (mode))
10253 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10254 build_real (type, r));
10255 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10259 if (HONOR_NANS (mode))
/* ORDERED (arg, arg) is false exactly when arg is NaN.  */
10261 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10262 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10268 /* Fold a call to an unordered comparison function such as
10269 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10270 being called and ARG0 and ARG1 are the arguments for the call.
10271 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10272 the opposite of the desired result. UNORDERED_CODE is used
10273 for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */
10277 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10278 enum tree_code unordered_code,
10279 enum tree_code ordered_code)
10281 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10282 enum tree_code code;
10284 enum tree_code code0, code1;
10285 tree cmp_type = NULL_TREE;
10287 type0 = TREE_TYPE (arg0);
10288 type1 = TREE_TYPE (arg1);
10290 code0 = TREE_CODE (type0);
10291 code1 = TREE_CODE (type1);
10293 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10294 /* Choose the wider of two real types. */
10295 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10297 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10299 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
/* Bring both operands to the common comparison type chosen above.  */
10302 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10303 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10305 if (unordered_code == UNORDERED_EXPR)
/* __builtin_isunordered itself: constant-fold to 0 when the mode has
   no NaNs, otherwise emit the UNORDERED_EXPR directly.  */
10307 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10308 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10309 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
/* Otherwise build the inverted comparison and negate it, so NaN
   operands yield the result the builtin's semantics require.  */
10312 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10314 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10315 fold_build2_loc (loc, code, type, arg0, arg1));
10318 /* Fold a call to built-in function FNDECL with 0 arguments.
10319 IGNORE is true if the result of the function call is ignored. This
10320 function returns NULL_TREE if no simplification was possible. */
10323 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10325 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10326 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* INF-family builtins fold to an infinity constant; the bool argument
   to fold_builtin_inf distinguishes INF (warn) from HUGE_VAL.  */
10329 CASE_FLT_FN (BUILT_IN_INF):
10330 case BUILT_IN_INFD32:
10331 case BUILT_IN_INFD64:
10332 case BUILT_IN_INFD128:
10333 return fold_builtin_inf (loc, type, true);
10335 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10336 return fold_builtin_inf (loc, type, false);
10338 case BUILT_IN_CLASSIFY_TYPE:
/* With no argument, classify_type gets NULL_TREE.  */
10339 return fold_builtin_classify_type (NULL_TREE);
10347 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10348 IGNORE is true if the result of the function call is ignored. This
10349 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): sampled excerpt -- the switch header, break statements
   and the default/return-NULL_TREE tail are among the missing lines.
   Each case either delegates to a fold_builtin_* helper or, for math
   builtins, constant-folds via MPFR/MPC (do_mpfr_arg1/do_mpc_arg1),
   optionally bounding the argument's valid domain.  */
10352 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10354 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10355 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10359 case BUILT_IN_CONSTANT_P:
10361 tree val = fold_builtin_constant_p (arg0);
10363 /* Gimplification will pull the CALL_EXPR for the builtin out of
10364 an if condition. When not optimizing, we'll not CSE it back.
10365 To avoid link error types of regressions, return false now. */
10366 if (!val && !optimize)
10367 val = integer_zero_node;
10372 case BUILT_IN_CLASSIFY_TYPE:
10373 return fold_builtin_classify_type (arg0);
10375 case BUILT_IN_STRLEN:
10376 return fold_builtin_strlen (loc, arg0);
10378 CASE_FLT_FN (BUILT_IN_FABS):
10379 return fold_builtin_fabs (loc, arg0, type);
10382 case BUILT_IN_LABS:
10383 case BUILT_IN_LLABS:
10384 case BUILT_IN_IMAXABS:
10385 return fold_builtin_abs (loc, arg0, type);
/* Complex-valued builtins: the argument must be a complex type whose
   component type is real before MPC constant folding applies.  */
10387 CASE_FLT_FN (BUILT_IN_CONJ):
10388 if (validate_arg (arg0, COMPLEX_TYPE)
10389 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10390 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10393 CASE_FLT_FN (BUILT_IN_CREAL):
10394 if (validate_arg (arg0, COMPLEX_TYPE)
10395 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10396 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10399 CASE_FLT_FN (BUILT_IN_CIMAG):
10400 if (validate_arg (arg0, COMPLEX_TYPE))
10401 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10404 CASE_FLT_FN (BUILT_IN_CCOS):
10405 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10407 CASE_FLT_FN (BUILT_IN_CCOSH):
10408 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10411 CASE_FLT_FN (BUILT_IN_CSIN):
10412 if (validate_arg (arg0, COMPLEX_TYPE)
10413 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10414 return do_mpc_arg1 (arg0, type, mpc_sin);
10417 CASE_FLT_FN (BUILT_IN_CSINH):
10418 if (validate_arg (arg0, COMPLEX_TYPE)
10419 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10420 return do_mpc_arg1 (arg0, type, mpc_sinh);
10423 CASE_FLT_FN (BUILT_IN_CTAN):
10424 if (validate_arg (arg0, COMPLEX_TYPE)
10425 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10426 return do_mpc_arg1 (arg0, type, mpc_tan);
10429 CASE_FLT_FN (BUILT_IN_CTANH):
10430 if (validate_arg (arg0, COMPLEX_TYPE)
10431 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10432 return do_mpc_arg1 (arg0, type, mpc_tanh);
10435 CASE_FLT_FN (BUILT_IN_CLOG):
10436 if (validate_arg (arg0, COMPLEX_TYPE)
10437 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10438 return do_mpc_arg1 (arg0, type, mpc_log);
10441 CASE_FLT_FN (BUILT_IN_CSQRT):
10442 if (validate_arg (arg0, COMPLEX_TYPE)
10443 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10444 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10448 CASE_FLT_FN (BUILT_IN_CABS):
10449 return fold_builtin_cabs (loc, arg0, type, fndecl);
10451 CASE_FLT_FN (BUILT_IN_CARG):
10452 return fold_builtin_carg (loc, arg0, type);
10454 CASE_FLT_FN (BUILT_IN_SQRT):
10455 return fold_builtin_sqrt (loc, arg0, type);
10457 CASE_FLT_FN (BUILT_IN_CBRT):
10458 return fold_builtin_cbrt (loc, arg0, type);
/* Real math functions folded through MPFR.  The two real_value
   pointers bound the valid input domain (NULL = unbounded); the final
   bool selects inclusive vs. exclusive bounds.  */
10460 CASE_FLT_FN (BUILT_IN_ASIN):
10461 if (validate_arg (arg0, REAL_TYPE))
10462 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10463 &dconstm1, &dconst1, true);
10466 CASE_FLT_FN (BUILT_IN_ACOS):
10467 if (validate_arg (arg0, REAL_TYPE))
10468 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10469 &dconstm1, &dconst1, true);
10472 CASE_FLT_FN (BUILT_IN_ATAN):
10473 if (validate_arg (arg0, REAL_TYPE))
10474 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10477 CASE_FLT_FN (BUILT_IN_ASINH):
10478 if (validate_arg (arg0, REAL_TYPE))
10479 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10482 CASE_FLT_FN (BUILT_IN_ACOSH):
10483 if (validate_arg (arg0, REAL_TYPE))
10484 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10485 &dconst1, NULL, true);
10488 CASE_FLT_FN (BUILT_IN_ATANH):
10489 if (validate_arg (arg0, REAL_TYPE))
10490 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10491 &dconstm1, &dconst1, false);
10494 CASE_FLT_FN (BUILT_IN_SIN):
10495 if (validate_arg (arg0, REAL_TYPE))
10496 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10499 CASE_FLT_FN (BUILT_IN_COS):
10500 return fold_builtin_cos (loc, arg0, type, fndecl);
10502 CASE_FLT_FN (BUILT_IN_TAN):
10503 return fold_builtin_tan (arg0, type);
10505 CASE_FLT_FN (BUILT_IN_CEXP):
10506 return fold_builtin_cexp (loc, arg0, type);
10508 CASE_FLT_FN (BUILT_IN_CEXPI):
10509 if (validate_arg (arg0, REAL_TYPE))
10510 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10513 CASE_FLT_FN (BUILT_IN_SINH):
10514 if (validate_arg (arg0, REAL_TYPE))
10515 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10518 CASE_FLT_FN (BUILT_IN_COSH):
10519 return fold_builtin_cosh (loc, arg0, type, fndecl);
10521 CASE_FLT_FN (BUILT_IN_TANH):
10522 if (validate_arg (arg0, REAL_TYPE))
10523 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10526 CASE_FLT_FN (BUILT_IN_ERF):
10527 if (validate_arg (arg0, REAL_TYPE))
10528 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10531 CASE_FLT_FN (BUILT_IN_ERFC):
10532 if (validate_arg (arg0, REAL_TYPE))
10533 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10536 CASE_FLT_FN (BUILT_IN_TGAMMA):
10537 if (validate_arg (arg0, REAL_TYPE))
10538 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10541 CASE_FLT_FN (BUILT_IN_EXP):
10542 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10544 CASE_FLT_FN (BUILT_IN_EXP2):
10545 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10547 CASE_FLT_FN (BUILT_IN_EXP10):
10548 CASE_FLT_FN (BUILT_IN_POW10):
10549 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10551 CASE_FLT_FN (BUILT_IN_EXPM1):
10552 if (validate_arg (arg0, REAL_TYPE))
10553 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10556 CASE_FLT_FN (BUILT_IN_LOG):
10557 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10559 CASE_FLT_FN (BUILT_IN_LOG2):
10560 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10562 CASE_FLT_FN (BUILT_IN_LOG10):
10563 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10565 CASE_FLT_FN (BUILT_IN_LOG1P):
10566 if (validate_arg (arg0, REAL_TYPE))
10567 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10568 &dconstm1, NULL, false);
/* Bessel functions: j0/j1 are unbounded (bound arguments truncated in
   this excerpt), y0/y1 require a strictly positive argument.  */
10571 CASE_FLT_FN (BUILT_IN_J0):
10572 if (validate_arg (arg0, REAL_TYPE))
10573 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10577 CASE_FLT_FN (BUILT_IN_J1):
10578 if (validate_arg (arg0, REAL_TYPE))
10579 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10583 CASE_FLT_FN (BUILT_IN_Y0):
10584 if (validate_arg (arg0, REAL_TYPE))
10585 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10586 &dconst0, NULL, false);
10589 CASE_FLT_FN (BUILT_IN_Y1):
10590 if (validate_arg (arg0, REAL_TYPE))
10591 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10592 &dconst0, NULL, false);
/* nan builds a quiet NaN, nans a signalling NaN.  */
10595 CASE_FLT_FN (BUILT_IN_NAN):
10596 case BUILT_IN_NAND32:
10597 case BUILT_IN_NAND64:
10598 case BUILT_IN_NAND128:
10599 return fold_builtin_nan (arg0, type, true);
10601 CASE_FLT_FN (BUILT_IN_NANS):
10602 return fold_builtin_nan (arg0, type, false);
10604 CASE_FLT_FN (BUILT_IN_FLOOR):
10605 return fold_builtin_floor (loc, fndecl, arg0);
10607 CASE_FLT_FN (BUILT_IN_CEIL):
10608 return fold_builtin_ceil (loc, fndecl, arg0);
10610 CASE_FLT_FN (BUILT_IN_TRUNC):
10611 return fold_builtin_trunc (loc, fndecl, arg0);
10613 CASE_FLT_FN (BUILT_IN_ROUND):
10614 return fold_builtin_round (loc, fndecl, arg0);
10616 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10617 CASE_FLT_FN (BUILT_IN_RINT):
10618 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10620 CASE_FLT_FN (BUILT_IN_LCEIL):
10621 CASE_FLT_FN (BUILT_IN_LLCEIL):
10622 CASE_FLT_FN (BUILT_IN_LFLOOR):
10623 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10624 CASE_FLT_FN (BUILT_IN_LROUND):
10625 CASE_FLT_FN (BUILT_IN_LLROUND):
10626 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10628 CASE_FLT_FN (BUILT_IN_LRINT):
10629 CASE_FLT_FN (BUILT_IN_LLRINT):
10630 return fold_fixed_mathfn (loc, fndecl, arg0);
10632 case BUILT_IN_BSWAP32:
10633 case BUILT_IN_BSWAP64:
10634 return fold_builtin_bswap (fndecl, arg0);
10636 CASE_INT_FN (BUILT_IN_FFS):
10637 CASE_INT_FN (BUILT_IN_CLZ):
10638 CASE_INT_FN (BUILT_IN_CTZ):
10639 CASE_INT_FN (BUILT_IN_POPCOUNT):
10640 CASE_INT_FN (BUILT_IN_PARITY):
10641 return fold_builtin_bitop (fndecl, arg0);
10643 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10644 return fold_builtin_signbit (loc, arg0, type);
10646 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10647 return fold_builtin_significand (loc, arg0, type);
10649 CASE_FLT_FN (BUILT_IN_ILOGB):
10650 CASE_FLT_FN (BUILT_IN_LOGB):
10651 return fold_builtin_logb (loc, arg0, type);
10653 case BUILT_IN_ISASCII:
10654 return fold_builtin_isascii (loc, arg0);
10656 case BUILT_IN_TOASCII:
10657 return fold_builtin_toascii (loc, arg0);
10659 case BUILT_IN_ISDIGIT:
10660 return fold_builtin_isdigit (loc, arg0);
/* FP classification builtins, normalized to a canonical code.  */
10662 CASE_FLT_FN (BUILT_IN_FINITE):
10663 case BUILT_IN_FINITED32:
10664 case BUILT_IN_FINITED64:
10665 case BUILT_IN_FINITED128:
10666 case BUILT_IN_ISFINITE:
10667 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10669 CASE_FLT_FN (BUILT_IN_ISINF):
10670 case BUILT_IN_ISINFD32:
10671 case BUILT_IN_ISINFD64:
10672 case BUILT_IN_ISINFD128:
10673 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10675 case BUILT_IN_ISINF_SIGN:
10676 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10678 CASE_FLT_FN (BUILT_IN_ISNAN):
10679 case BUILT_IN_ISNAND32:
10680 case BUILT_IN_ISNAND64:
10681 case BUILT_IN_ISNAND128:
10682 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
/* printf-family calls with only the format argument.  */
10684 case BUILT_IN_PRINTF:
10685 case BUILT_IN_PRINTF_UNLOCKED:
10686 case BUILT_IN_VPRINTF:
10687 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10697 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10698 IGNORE is true if the result of the function call is ignored. This
10699 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): sampled excerpt -- the switch header, break statements
   and the default/return-NULL_TREE tail are among the missing lines.  */
10702 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10704 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10705 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* jn/yn take an integer order plus a real argument; constant folding
   goes through the MPFR Bessel helpers.  */
10709 CASE_FLT_FN (BUILT_IN_JN):
10710 if (validate_arg (arg0, INTEGER_TYPE)
10711 && validate_arg (arg1, REAL_TYPE))
10712 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10715 CASE_FLT_FN (BUILT_IN_YN):
10716 if (validate_arg (arg0, INTEGER_TYPE)
10717 && validate_arg (arg1, REAL_TYPE))
10718 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10722 CASE_FLT_FN (BUILT_IN_DREM):
10723 CASE_FLT_FN (BUILT_IN_REMAINDER):
10724 if (validate_arg (arg0, REAL_TYPE)
10725 && validate_arg(arg1, REAL_TYPE))
10726 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
/* Reentrant gamma/lgamma variants take a pointer for signgam.  */
10729 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10730 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10731 if (validate_arg (arg0, REAL_TYPE)
10732 && validate_arg(arg1, POINTER_TYPE))
10733 return do_mpfr_lgamma_r (arg0, arg1, type);
10736 CASE_FLT_FN (BUILT_IN_ATAN2):
10737 if (validate_arg (arg0, REAL_TYPE)
10738 && validate_arg(arg1, REAL_TYPE))
10739 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10742 CASE_FLT_FN (BUILT_IN_FDIM):
10743 if (validate_arg (arg0, REAL_TYPE)
10744 && validate_arg(arg1, REAL_TYPE))
10745 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10748 CASE_FLT_FN (BUILT_IN_HYPOT):
10749 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
/* cpow folding needs MPC's mpc_pow, so it is conditional on the
   configured MPC version.  */
10751 #ifdef HAVE_mpc_pow
10752 CASE_FLT_FN (BUILT_IN_CPOW):
10753 if (validate_arg (arg0, COMPLEX_TYPE)
10754 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10755 && validate_arg (arg1, COMPLEX_TYPE)
10756 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10757 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10761 CASE_FLT_FN (BUILT_IN_LDEXP):
10762 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10763 CASE_FLT_FN (BUILT_IN_SCALBN):
10764 CASE_FLT_FN (BUILT_IN_SCALBLN):
10765 return fold_builtin_load_exponent (loc, arg0, arg1,
10766 type, /*ldexp=*/false);
10768 CASE_FLT_FN (BUILT_IN_FREXP):
10769 return fold_builtin_frexp (loc, arg0, arg1, type);
10771 CASE_FLT_FN (BUILT_IN_MODF):
10772 return fold_builtin_modf (loc, arg0, arg1, type);
10774 case BUILT_IN_BZERO:
10775 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10777 case BUILT_IN_FPUTS:
10778 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10780 case BUILT_IN_FPUTS_UNLOCKED:
10781 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10783 case BUILT_IN_STRSTR:
10784 return fold_builtin_strstr (loc, arg0, arg1, type);
10786 case BUILT_IN_STRCAT:
10787 return fold_builtin_strcat (loc, arg0, arg1);
10789 case BUILT_IN_STRSPN:
10790 return fold_builtin_strspn (loc, arg0, arg1);
10792 case BUILT_IN_STRCSPN:
10793 return fold_builtin_strcspn (loc, arg0, arg1);
10795 case BUILT_IN_STRCHR:
10796 case BUILT_IN_INDEX:
10797 return fold_builtin_strchr (loc, arg0, arg1, type);
10799 case BUILT_IN_STRRCHR:
10800 case BUILT_IN_RINDEX:
10801 return fold_builtin_strrchr (loc, arg0, arg1, type);
10803 case BUILT_IN_STRCPY:
10804 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
/* stpcpy: when the result is unused it can become a plain strcpy
   (surrounding condition lines are missing in this excerpt).  */
10806 case BUILT_IN_STPCPY:
10809 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10813 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10817 case BUILT_IN_STRCMP:
10818 return fold_builtin_strcmp (loc, arg0, arg1);
10820 case BUILT_IN_STRPBRK:
10821 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10823 case BUILT_IN_EXPECT:
10824 return fold_builtin_expect (loc, arg0, arg1);
10826 CASE_FLT_FN (BUILT_IN_POW):
10827 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10829 CASE_FLT_FN (BUILT_IN_POWI):
10830 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10832 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10833 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10835 CASE_FLT_FN (BUILT_IN_FMIN):
10836 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10838 CASE_FLT_FN (BUILT_IN_FMAX):
10839 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
/* ISO C99 comparison macros: each maps to the pair (unordered code,
   ordered code) consumed by fold_builtin_unordered_cmp.  */
10841 case BUILT_IN_ISGREATER:
10842 return fold_builtin_unordered_cmp (loc, fndecl,
10843 arg0, arg1, UNLE_EXPR, LE_EXPR);
10844 case BUILT_IN_ISGREATEREQUAL:
10845 return fold_builtin_unordered_cmp (loc, fndecl,
10846 arg0, arg1, UNLT_EXPR, LT_EXPR);
10847 case BUILT_IN_ISLESS:
10848 return fold_builtin_unordered_cmp (loc, fndecl,
10849 arg0, arg1, UNGE_EXPR, GE_EXPR);
10850 case BUILT_IN_ISLESSEQUAL:
10851 return fold_builtin_unordered_cmp (loc, fndecl,
10852 arg0, arg1, UNGT_EXPR, GT_EXPR);
10853 case BUILT_IN_ISLESSGREATER:
10854 return fold_builtin_unordered_cmp (loc, fndecl,
10855 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10856 case BUILT_IN_ISUNORDERED:
10857 return fold_builtin_unordered_cmp (loc, fndecl,
10858 arg0, arg1, UNORDERED_EXPR,
10861 /* We do the folding for va_start in the expander. */
10862 case BUILT_IN_VA_START:
10865 case BUILT_IN_SPRINTF:
10866 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10868 case BUILT_IN_OBJECT_SIZE:
10869 return fold_builtin_object_size (arg0, arg1);
10871 case BUILT_IN_PRINTF:
10872 case BUILT_IN_PRINTF_UNLOCKED:
10873 case BUILT_IN_VPRINTF:
10874 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
/* _chk variants: the first argument is the checking flag; it must be
   a side-effect-free integer before it can be dropped.  */
10876 case BUILT_IN_PRINTF_CHK:
10877 case BUILT_IN_VPRINTF_CHK:
10878 if (!validate_arg (arg0, INTEGER_TYPE)
10879 || TREE_SIDE_EFFECTS (arg0))
10882 return fold_builtin_printf (loc, fndecl,
10883 arg1, NULL_TREE, ignore, fcode);
10886 case BUILT_IN_FPRINTF:
10887 case BUILT_IN_FPRINTF_UNLOCKED:
10888 case BUILT_IN_VFPRINTF:
10889 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10898 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10899 and ARG2. IGNORE is true if the result of the function call is ignored.
10900 This function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): sampled excerpt -- switch header, breaks and the
   default/return-NULL_TREE tail are among the missing lines.  */
10903 fold_builtin_3 (location_t loc, tree fndecl,
10904 tree arg0, tree arg1, tree arg2, bool ignore)
10906 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10907 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10911 CASE_FLT_FN (BUILT_IN_SINCOS):
10912 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10914 CASE_FLT_FN (BUILT_IN_FMA):
10915 if (validate_arg (arg0, REAL_TYPE)
10916 && validate_arg(arg1, REAL_TYPE)
10917 && validate_arg(arg2, REAL_TYPE))
10918 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10921 CASE_FLT_FN (BUILT_IN_REMQUO):
10922 if (validate_arg (arg0, REAL_TYPE)
10923 && validate_arg(arg1, REAL_TYPE)
10924 && validate_arg(arg2, POINTER_TYPE))
10925 return do_mpfr_remquo (arg0, arg1, arg2);
10928 case BUILT_IN_MEMSET:
10929 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
/* bcopy(src, dst, n) has reversed operands vs. memmove; endp=3 marks
   memmove semantics (overlap allowed) in fold_builtin_memory_op.  */
10931 case BUILT_IN_BCOPY:
10932 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10933 void_type_node, true, /*endp=*/3);
10935 case BUILT_IN_MEMCPY:
10936 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10937 type, ignore, /*endp=*/0);
10939 case BUILT_IN_MEMPCPY:
10940 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10941 type, ignore, /*endp=*/1);
10943 case BUILT_IN_MEMMOVE:
10944 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10945 type, ignore, /*endp=*/3);
10947 case BUILT_IN_STRNCAT:
10948 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10950 case BUILT_IN_STRNCPY:
10951 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10953 case BUILT_IN_STRNCMP:
10954 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10956 case BUILT_IN_MEMCHR:
10957 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10959 case BUILT_IN_BCMP:
10960 case BUILT_IN_MEMCMP:
10961 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10963 case BUILT_IN_SPRINTF:
10964 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10966 case BUILT_IN_STRCPY_CHK:
10967 case BUILT_IN_STPCPY_CHK:
10968 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10971 case BUILT_IN_STRCAT_CHK:
10972 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
/* _chk printf variants: flag argument (arg0 or arg1) must be a
   side-effect-free integer before folding can drop it.  */
10974 case BUILT_IN_PRINTF_CHK:
10975 case BUILT_IN_VPRINTF_CHK:
10976 if (!validate_arg (arg0, INTEGER_TYPE)
10977 || TREE_SIDE_EFFECTS (arg0))
10980 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10983 case BUILT_IN_FPRINTF:
10984 case BUILT_IN_FPRINTF_UNLOCKED:
10985 case BUILT_IN_VFPRINTF:
10986 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10989 case BUILT_IN_FPRINTF_CHK:
10990 case BUILT_IN_VFPRINTF_CHK:
10991 if (!validate_arg (arg1, INTEGER_TYPE)
10992 || TREE_SIDE_EFFECTS (arg1))
10995 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11004 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11005 ARG2, and ARG3. IGNORE is true if the result of the function call is
11006 ignored. This function returns NULL_TREE if no simplification was
/* NOTE(review): sampled excerpt -- switch header, breaks and the
   return-NULL_TREE tail are among the missing lines.  */
11010 fold_builtin_4 (location_t loc, tree fndecl,
11011 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11013 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* Object-size-checked memory builtins share one folder, keyed by the
   specific function code.  */
11017 case BUILT_IN_MEMCPY_CHK:
11018 case BUILT_IN_MEMPCPY_CHK:
11019 case BUILT_IN_MEMMOVE_CHK:
11020 case BUILT_IN_MEMSET_CHK:
11021 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11023 DECL_FUNCTION_CODE (fndecl));
11025 case BUILT_IN_STRNCPY_CHK:
11026 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
11028 case BUILT_IN_STRNCAT_CHK:
11029 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
/* Checked fprintf: arg1 is the checking flag and must be a
   side-effect-free integer before it can be dropped.  */
11031 case BUILT_IN_FPRINTF_CHK:
11032 case BUILT_IN_VFPRINTF_CHK:
11033 if (!validate_arg (arg1, INTEGER_TYPE)
11034 || TREE_SIDE_EFFECTS (arg1))
11037 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11047 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11048 arguments, where NARGS <= 4. IGNORE is true if the result of the
11049 function call is ignored. This function returns NULL_TREE if no
11050 simplification was possible. Note that this only folds builtins with
11051 fixed argument patterns. Foldings that do varargs-to-varargs
11052 transformations, or that match calls with more than 4 arguments,
11053 need to be handled with fold_builtin_varargs instead. */
11055 #define MAX_ARGS_TO_FOLD_BUILTIN 4
/* NOTE(review): sampled excerpt -- the switch-on-nargs header, case
   labels, and the "if (ret)" guard around the NOP_EXPR wrap are among
   the missing lines.  Dispatches to fold_builtin_0..4 by NARGS and,
   on success, wraps the result in a NOP_EXPR with TREE_NO_WARNING set
   so removing the call does not trigger bogus warnings.  */
11058 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11060 tree ret = NULL_TREE;
11065 ret = fold_builtin_0 (loc, fndecl, ignore);
11068 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11071 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11074 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11077 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11085 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11086 SET_EXPR_LOCATION (ret, loc);
11087 TREE_NO_WARNING (ret) = 1;
11093 /* Builtins with folding operations that operate on "..." arguments
11094 need special handling; we need to store the arguments in a convenient
11095 data structure before attempting any folding. Fortunately there are
11096 only a few builtins that fall into this category. FNDECL is the
11097 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11098 result of the function call is ignored. */
/* NOTE(review): sampled excerpt -- switch header, breaks, default
   case and the "if (ret)" guard are among the missing lines.  */
11101 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11102 bool ignore ATTRIBUTE_UNUSED)
11104 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11105 tree ret = NULL_TREE;
11109 case BUILT_IN_SPRINTF_CHK:
11110 case BUILT_IN_VSPRINTF_CHK:
11111 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11114 case BUILT_IN_SNPRINTF_CHK:
11115 case BUILT_IN_VSNPRINTF_CHK:
11116 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11119 case BUILT_IN_FPCLASSIFY:
11120 ret = fold_builtin_fpclassify (loc, exp);
/* Same post-processing as fold_builtin_n: wrap in NOP_EXPR and
   suppress warnings on the replacement expression.  */
11128 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11129 SET_EXPR_LOCATION (ret, loc);
11130 TREE_NO_WARNING (ret) = 1;
11136 /* Return true if FNDECL shouldn't be folded right now.
11137 If a built-in function has an inline attribute always_inline
11138 wrapper, defer folding it after always_inline functions have
11139 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11140 might not be performed. */
/* NOTE(review): sampled excerpt -- the return type line and a
   conjunct around line 11147 are missing from this view.  */
11143 avoid_folding_inline_builtin (tree fndecl)
11145 return (DECL_DECLARED_INLINE_P (fndecl)
11146 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11148 && !cfun->always_inline_functions_inlined
11149 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11152 /* A wrapper function for builtin folding that prevents warnings for
11153 "statement without effect" and the like, caused by removing the
11154 call node earlier than the warning is generated. */
/* NOTE(review): sampled excerpt -- several condition/brace lines and
   the trailing "return NULL_TREE" are missing from this view.  */
11157 fold_call_expr (location_t loc, tree exp, bool ignore)
11159 tree ret = NULL_TREE;
11160 tree fndecl = get_callee_fndecl (exp);
11162 && TREE_CODE (fndecl) == FUNCTION_DECL
11163 && DECL_BUILT_IN (fndecl)
11164 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11165 yet. Defer folding until we see all the arguments
11166 (after inlining). */
11167 && !CALL_EXPR_VA_ARG_PACK (exp))
11169 int nargs = call_expr_nargs (exp);
11171 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11172 instead last argument is __builtin_va_arg_pack (). Defer folding
11173 even in that case, until arguments are finalized. */
11174 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11176 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11178 && TREE_CODE (fndecl2) == FUNCTION_DECL
11179 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11180 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
/* always_inline fortify wrappers must be inlined before folding.  */
11184 if (avoid_folding_inline_builtin (fndecl))
11187 /* FIXME: Don't use a list in this interface. */
11188 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11189 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
/* Fixed-arity builtins go through fold_builtin_n; anything longer
   is handled by the varargs folder.  */
11192 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11194 tree *args = CALL_EXPR_ARGP (exp);
11195 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11198 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11206 /* Conveniently construct a function call expression. FNDECL names the
11207 function to be called and ARGLIST is a TREE_LIST of arguments. */
/* Copies the TREE_LIST into a stack array and delegates to
   fold_builtin_call_array.  */
11210 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
11212 tree fntype = TREE_TYPE (fndecl);
11213 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11214 int n = list_length (arglist);
11215 tree *argarray = (tree *) alloca (n * sizeof (tree));
11218 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
11219 argarray[i] = TREE_VALUE (arglist);
11220 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11223 /* Conveniently construct a function call expression. FNDECL names the
11224 function to be called, N is the number of arguments, and the "..."
11225 parameters are the argument expressions. */
/* NOTE(review): sampled excerpt -- the va_start/va_end lines for AP
   are missing from this view.  Collects the varargs into a stack
   array and delegates to fold_builtin_call_array.  */
11228 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11231 tree fntype = TREE_TYPE (fndecl);
11232 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11233 tree *argarray = (tree *) alloca (n * sizeof (tree));
11237 for (i = 0; i < n; i++)
11238 argarray[i] = va_arg (ap, tree);
11240 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11243 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11244 N arguments are passed in the array ARGARRAY. */
/* Tries to fold the call first; only builds a real CALL_EXPR when no
   simplification applies.  NOTE(review): sampled excerpt -- some
   brace/condition lines are missing from this view.  */
11247 fold_builtin_call_array (location_t loc, tree type,
11252 tree ret = NULL_TREE;
11256 if (TREE_CODE (fn) == ADDR_EXPR)
11258 tree fndecl = TREE_OPERAND (fn, 0);
11259 if (TREE_CODE (fndecl) == FUNCTION_DECL
11260 && DECL_BUILT_IN (fndecl))
11262 /* If last argument is __builtin_va_arg_pack (), arguments to this
11263 function are not finalized yet. Defer folding until they are. */
11264 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11266 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11268 && TREE_CODE (fndecl2) == FUNCTION_DECL
11269 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11270 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11271 return build_call_array_loc (loc, type, fn, n, argarray);
11273 if (avoid_folding_inline_builtin (fndecl))
11274 return build_call_array_loc (loc, type, fn, n, argarray);
/* Machine-specific builtins still take a TREE_LIST interface, so
   cons the array back into a list before asking the target.  */
11275 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11277 tree arglist = NULL_TREE;
11278 for (i = n - 1; i >= 0; i--)
11279 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11280 ret = targetm.fold_builtin (fndecl, arglist, false);
11283 return build_call_array_loc (loc, type, fn, n, argarray);
11285 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11287 /* First try the transformations that don't require consing up
11289 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11294 /* If we got this far, we need to build an exp. */
11295 exp = build_call_array_loc (loc, type, fn, n, argarray);
11296 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11297 return ret ? ret : exp;
11301 return build_call_array_loc (loc, type, fn, n, argarray);
11304 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11305 along with N new arguments specified as the "..." parameters. SKIP
11306 is the number of arguments in EXP to be omitted. This function is used
11307 to do varargs-to-varargs transformations. */
/* NOTE(review): sampled excerpt -- variable declarations, the
   va_start/va_end pair, and the "if (n)" branch structure are missing
   from this view.  When N new arguments are supplied they are placed
   first, followed by EXP's arguments from index SKIP onward; with no
   new arguments the existing argument array is reused in place.  */
11310 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11312 int oldnargs = call_expr_nargs (exp);
11313 int nargs = oldnargs - skip + n;
11314 tree fntype = TREE_TYPE (fndecl);
11315 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11323 buffer = XALLOCAVEC (tree, nargs);
11325 for (i = 0; i < n; i++)
11326 buffer[i] = va_arg (ap, tree);
11328 for (j = skip; j < oldnargs; j++, i++)
11329 buffer[i] = CALL_EXPR_ARG (exp, j);
11332 buffer = CALL_EXPR_ARGP (exp) + skip;
11334 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
11337 /* Validate a single argument ARG against a tree code CODE representing
/* POINTER_TYPE and INTEGER_TYPE accept any pointer-ish or integral
   type respectively; everything else requires an exact code match.
   NOTE(review): the NULL-arg guard around line 11343-11344 is missing
   from this sampled view.  */
11341 validate_arg (const_tree arg, enum tree_code code)
11345 else if (code == POINTER_TYPE)
11346 return POINTER_TYPE_P (TREE_TYPE (arg));
11347 else if (code == INTEGER_TYPE)
11348 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11349 return code == TREE_CODE (TREE_TYPE (arg));
11352 /* This function validates the types of a function call argument list
11353 against a specified list of tree_codes. If the last specifier is a 0,
11354 that represents an ellipses, otherwise the last specifier must be a
11357 This is the GIMPLE version of validate_arglist. Eventually we want to
11358 completely convert builtins.c to work from GIMPLEs and the tree based
11359 validate_arglist will then be removed. */
/* NOTE(review): sampled excerpt -- the switch-on-code body, goto
   labels (end/done) and the va_end/return tail are missing.  Walks
   the va_list of expected tree codes against the call's arguments.  */
11362 validate_gimple_arglist (const_gimple call, ...)
11364 enum tree_code code;
11370 va_start (ap, call);
11375 code = (enum tree_code) va_arg (ap, int);
11379 /* This signifies an ellipses, any further arguments are all ok. */
11383 /* This signifies an endlink, if no arguments remain, return
11384 true, otherwise return false. */
11385 res = (i == gimple_call_num_args (call));
11388 /* If no parameters remain or the parameter's code does not
11389 match the specified code, return false. Otherwise continue
11390 checking any remaining arguments. */
11391 arg = gimple_call_arg (call, i++);
11392 if (!validate_arg (arg, code))
11399 /* We need gotos here since we can only have one VA_CLOSE in a
11407 /* This function validates the types of a function call argument list
11408 against a specified list of tree_codes. If the last specifier is a 0,
11409 that represents an ellipses, otherwise the last specifier must be a
/* Tree-based twin of validate_gimple_arglist, iterating the CALL_EXPR
   arguments with a const_call_expr_arg_iterator.  NOTE(review):
   sampled excerpt -- the switch body, goto labels and va_end/return
   tail are missing from this view.  */
11413 validate_arglist (const_tree callexpr, ...)
11415 enum tree_code code;
11418 const_call_expr_arg_iterator iter;
11421 va_start (ap, callexpr);
11422 init_const_call_expr_arg_iterator (callexpr, &iter);
11426 code = (enum tree_code) va_arg (ap, int);
11430 /* This signifies an ellipses, any further arguments are all ok. */
11434 /* This signifies an endlink, if no arguments remain, return
11435 true, otherwise return false. */
11436 res = !more_const_call_expr_args_p (&iter);
11439 /* If no parameters remain or the parameter's code does not
11440 match the specified code, return false. Otherwise continue
11441 checking any remaining arguments. */
11442 arg = next_const_call_expr_arg (&iter);
11443 if (!validate_arg (arg, code))
11450 /* We need gotos here since we can only have one VA_CLOSE in a
11458 /* Default target-specific builtin expander that does nothing. */
/* NOTE(review): the return type line and the body (presumably
   "return NULL_RTX;") are missing from this sampled view.  */
11461 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11462 rtx target ATTRIBUTE_UNUSED,
11463 rtx subtarget ATTRIBUTE_UNUSED,
11464 enum machine_mode mode ATTRIBUTE_UNUSED,
11465 int ignore ATTRIBUTE_UNUSED)
11470 /* Returns true is EXP represents data that would potentially reside
11471 in a readonly section. */
/* NOTE(review): sampled excerpt -- STRIP_NOPS, the NULL check after
   get_base_address, and the final "return false" are missing.  */
11474 readonly_data_expr (tree exp)
11478 if (TREE_CODE (exp) != ADDR_EXPR)
11481 exp = get_base_address (TREE_OPERAND (exp, 0));
11485 /* Make sure we call decl_readonly_section only for trees it
11486 can handle (since it returns true for everything it doesn't
11488 if (TREE_CODE (exp) == STRING_CST
11489 || TREE_CODE (exp) == CONSTRUCTOR
11490 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11491 return decl_readonly_section (exp, 0);
11496 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11497 to the call, and TYPE is its return type.
11499 Return NULL_TREE if no simplification was possible, otherwise return the
11500 simplified form of the call as a tree.
11502 The simplified form may be a constant or other expression which
11503 computes the same value, but in a more efficient manner (including
11504 calls to other builtin functions).
11506 The call may contain arguments which need to be evaluated, but
11507 which are not useful to determine the result of the call. In
11508 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11509 COMPOUND_EXPR will be an argument which must be evaluated.
11510 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11511 COMPOUND_EXPR in the chain will contain the tree for the simplified
11512 form of the builtin function call. */
/* NOTE(review): sampled excerpt -- several brace/declaration lines
   (e.g. the fn/tem declarations and the single-character-test around
   line 11549) are missing from this view.  */
11515 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11517 if (!validate_arg (s1, POINTER_TYPE)
11518 || !validate_arg (s2, POINTER_TYPE))
11523 const char *p1, *p2;
/* If both strings are compile-time constants, fold the search at
   compile time with the host strstr.  */
11525 p2 = c_getstr (s2);
11529 p1 = c_getstr (s1);
11532 const char *r = strstr (p1, p2);
/* Not found: the result is a null pointer of the argument's type.  */
11536 return build_int_cst (TREE_TYPE (s1), 0);
11538 /* Return an offset into the constant string argument. */
11539 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11540 s1, size_int (r - p1));
11541 return fold_convert_loc (loc, type, tem);
11544 /* The argument is const char *, and the result is char *, so we need
11545 a type conversion here to avoid a warning. */
11547 return fold_convert_loc (loc, type, s1);
/* Single-character needle: strstr(s1, "c") becomes strchr(s1, 'c')
   when the strchr builtin is available.  */
11552 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11556 /* New argument list transforming strstr(s1, s2) to
11557 strchr(s1, s2[0]). */
11558 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11562 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11563 the call, and TYPE is its return type.
11565 Return NULL_TREE if no simplification was possible, otherwise return the
11566 simplified form of the call as a tree.
11568 The simplified form may be a constant or other expression which
11569 computes the same value, but in a more efficient manner (including
11570 calls to other builtin functions).
11572 The call may contain arguments which need to be evaluated, but
11573 which are not useful to determine the result of the call. In
11574 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11575 COMPOUND_EXPR will be an argument which must be evaluated.
11576 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11577 COMPOUND_EXPR in the chain will contain the tree for the simplified
11578 form of the builtin function call. */
11581 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11583 if (!validate_arg (s1, POINTER_TYPE)
11584 || !validate_arg (s2, INTEGER_TYPE))
/* Only fold when the character argument is a compile-time constant.  */
11590 if (TREE_CODE (s2) != INTEGER_CST)
11593 p1 = c_getstr (s1);
/* Map S2 into the target character set; bail out if it does not fit
   in a target char.  */
11600 if (target_char_cast (s2, &c))
/* Constant string and constant character: search at compile time.  */
11603 r = strchr (p1, c);
/* Not found: fold to a null pointer of S1's type.  */
11606 return build_int_cst (TREE_TYPE (s1), 0);
11608 /* Return an offset into the constant string argument. */
11609 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11610 s1, size_int (r - p1));
11611 return fold_convert_loc (loc, type, tem);
11617 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11618 the call, and TYPE is its return type.
11620 Return NULL_TREE if no simplification was possible, otherwise return the
11621 simplified form of the call as a tree.
11623 The simplified form may be a constant or other expression which
11624 computes the same value, but in a more efficient manner (including
11625 calls to other builtin functions).
11627 The call may contain arguments which need to be evaluated, but
11628 which are not useful to determine the result of the call. In
11629 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11630 COMPOUND_EXPR will be an argument which must be evaluated.
11631 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11632 COMPOUND_EXPR in the chain will contain the tree for the simplified
11633 form of the builtin function call. */
11636 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11638 if (!validate_arg (s1, POINTER_TYPE)
11639 || !validate_arg (s2, INTEGER_TYPE))
/* Only fold when the character argument is a compile-time constant.  */
11646 if (TREE_CODE (s2) != INTEGER_CST)
11649 p1 = c_getstr (s1);
/* Map S2 into the target character set; bail out on failure.  */
11656 if (target_char_cast (s2, &c))
/* Constant string and constant character: search at compile time.  */
11659 r = strrchr (p1, c);
11662 return build_int_cst (TREE_TYPE (s1), 0);
11664 /* Return an offset into the constant string argument. */
11665 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11666 s1, size_int (r - p1));
11667 return fold_convert_loc (loc, type, tem);
/* Non-constant string: only searching for '\0' can be simplified,
   since the terminator occurs exactly once -- strrchr and strchr then
   agree.  */
11670 if (! integer_zerop (s2))
11673 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11677 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11678 return build_call_expr_loc (loc, fn, 2, s1, s2);
11682 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11683 to the call, and TYPE is its return type.
11685 Return NULL_TREE if no simplification was possible, otherwise return the
11686 simplified form of the call as a tree.
11688 The simplified form may be a constant or other expression which
11689 computes the same value, but in a more efficient manner (including
11690 calls to other builtin functions).
11692 The call may contain arguments which need to be evaluated, but
11693 which are not useful to determine the result of the call. In
11694 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11695 COMPOUND_EXPR will be an argument which must be evaluated.
11696 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11697 COMPOUND_EXPR in the chain will contain the tree for the simplified
11698 form of the builtin function call. */
11701 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11703 if (!validate_arg (s1, POINTER_TYPE)
11704 || !validate_arg (s2, POINTER_TYPE))
11709 const char *p1, *p2;
11711 p2 = c_getstr (s2);
11715 p1 = c_getstr (s1);
/* Both strings are compile-time constants: evaluate with the host
   strpbrk.  */
11718 const char *r = strpbrk (p1, p2);
/* No accepted character found: fold to a null pointer of S1's type.  */
11722 return build_int_cst (TREE_TYPE (s1), 0);
11724 /* Return an offset into the constant string argument. */
11725 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11726 s1, size_int (r - p1));
11727 return fold_convert_loc (loc, type, tem);
11731 /* strpbrk(x, "") == NULL.
11732 Evaluate and ignore s1 in case it had side-effects. */
11733 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11736 return NULL_TREE; /* Really call strpbrk. */
/* NOTE(review): the guard that P2 is exactly one character long before
   degrading to strchr is not visible in this excerpt -- confirm in
   full source.  */
11738 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11742 /* New argument list transforming strpbrk(s1, s2) to
11743 strchr(s1, s2[0]). */
11744 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11748 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11751 Return NULL_TREE if no simplification was possible, otherwise return the
11752 simplified form of the call as a tree.
11754 The simplified form may be a constant or other expression which
11755 computes the same value, but in a more efficient manner (including
11756 calls to other builtin functions).
11758 The call may contain arguments which need to be evaluated, but
11759 which are not useful to determine the result of the call. In
11760 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11761 COMPOUND_EXPR will be an argument which must be evaluated.
11762 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11763 COMPOUND_EXPR in the chain will contain the tree for the simplified
11764 form of the builtin function call. */
11767 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11769 if (!validate_arg (dst, POINTER_TYPE)
11770 || !validate_arg (src, POINTER_TYPE))
11774 const char *p = c_getstr (src);
11776 /* If the string length is zero, return the dst parameter. */
/* Appending "" is a no-op; SRC must presumably still be evaluated for
   side effects -- the return statement is outside this excerpt.  */
11777 if (p && *p == '\0')
11784 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11785 arguments to the call.
11787 Return NULL_TREE if no simplification was possible, otherwise return the
11788 simplified form of the call as a tree.
11790 The simplified form may be a constant or other expression which
11791 computes the same value, but in a more efficient manner (including
11792 calls to other builtin functions).
11794 The call may contain arguments which need to be evaluated, but
11795 which are not useful to determine the result of the call. In
11796 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11797 COMPOUND_EXPR will be an argument which must be evaluated.
11798 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11799 COMPOUND_EXPR in the chain will contain the tree for the simplified
11800 form of the builtin function call. */
11803 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11805 if (!validate_arg (dst, POINTER_TYPE)
11806 || !validate_arg (src, POINTER_TYPE)
11807 || !validate_arg (len, INTEGER_TYPE))
11811 const char *p = c_getstr (src);
11813 /* If the requested length is zero, or the src parameter string
11814 length is zero, return the dst parameter. */
/* Both SRC and LEN are kept alive for side effects via
   omit_two_operands.  */
11815 if (integer_zerop (len) || (p && *p == '\0'))
11816 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11818 /* If the requested len is greater than or equal to the string
11819 length, call strcat. */
/* LEN >= strlen (SRC) means the bound never truncates, so plain
   strcat is equivalent.  */
11820 if (TREE_CODE (len) == INTEGER_CST && p
11821 && compare_tree_int (len, strlen (p)) >= 0)
11823 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11825 /* If the replacement _DECL isn't initialized, don't do the
11830 return build_call_expr_loc (loc, fn, 2, dst, src);
11836 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11839 Return NULL_TREE if no simplification was possible, otherwise return the
11840 simplified form of the call as a tree.
11842 The simplified form may be a constant or other expression which
11843 computes the same value, but in a more efficient manner (including
11844 calls to other builtin functions).
11846 The call may contain arguments which need to be evaluated, but
11847 which are not useful to determine the result of the call. In
11848 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11849 COMPOUND_EXPR will be an argument which must be evaluated.
11850 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11851 COMPOUND_EXPR in the chain will contain the tree for the simplified
11852 form of the builtin function call. */
11855 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11857 if (!validate_arg (s1, POINTER_TYPE)
11858 || !validate_arg (s2, POINTER_TYPE))
11862 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11864 /* If both arguments are constants, evaluate at compile-time. */
11867 const size_t r = strspn (p1, p2);
11868 return size_int (r);
11871 /* If either argument is "", return NULL_TREE. */
/* strspn (s, "") and strspn ("", s) are both 0; keep the arguments
   alive for side effects via omit_two_operands.  (The comment above
   says NULL_TREE but the code below folds to size 0.)  */
11872 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11873 /* Evaluate and ignore both arguments in case either one has
11875 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11881 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11884 Return NULL_TREE if no simplification was possible, otherwise return the
11885 simplified form of the call as a tree.
11887 The simplified form may be a constant or other expression which
11888 computes the same value, but in a more efficient manner (including
11889 calls to other builtin functions).
11891 The call may contain arguments which need to be evaluated, but
11892 which are not useful to determine the result of the call. In
11893 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11894 COMPOUND_EXPR will be an argument which must be evaluated.
11895 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11896 COMPOUND_EXPR in the chain will contain the tree for the simplified
11897 form of the builtin function call. */
11900 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11902 if (!validate_arg (s1, POINTER_TYPE)
11903 || !validate_arg (s2, POINTER_TYPE))
11907 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11909 /* If both arguments are constants, evaluate at compile-time. */
11912 const size_t r = strcspn (p1, p2);
11913 return size_int (r);
11916 /* If the first argument is "", return NULL_TREE. */
/* strcspn ("", s2) is 0; keep S2 alive for side effects.  (The comment
   above says NULL_TREE but the code below folds to size 0.)  */
11917 if (p1 && *p1 == '\0')
11919 /* Evaluate and ignore argument s2 in case it has
11921 return omit_one_operand_loc (loc, size_type_node,
11922 size_zero_node, s2);
11925 /* If the second argument is "", return __builtin_strlen(s1). */
/* With no reject characters, the span is the whole string.  */
11926 if (p2 && *p2 == '\0')
11928 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11930 /* If the replacement _DECL isn't initialized, don't do the
11935 return build_call_expr_loc (loc, fn, 1, s1);
11941 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11942 to the call. IGNORE is true if the value returned
11943 by the builtin will be ignored. UNLOCKED is true is true if this
11944 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11945 the known length of the string. Return NULL_TREE if no simplification
11949 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11950 bool ignore, bool unlocked, tree len)
11952 /* If we're using an unlocked function, assume the other unlocked
11953 functions exist explicitly. */
11954 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11955 : implicit_built_in_decls[BUILT_IN_FPUTC];
11956 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11957 : implicit_built_in_decls[BUILT_IN_FWRITE];
11959 /* If the return value is used, don't do the transformation. */
11963 /* Verify the arguments in the original call. */
11964 if (!validate_arg (arg0, POINTER_TYPE)
11965 || !validate_arg (arg1, POINTER_TYPE))
/* Caller did not supply a known length: try to compute it here.  */
11969 len = c_strlen (arg0, 0);
11971 /* Get the length of the string passed to fputs. If the length
11972 can't be determined, punt. */
11974 || TREE_CODE (len) != INTEGER_CST)
/* compare_tree_int (len, 1): -1 means len==0, 0 means len==1,
   1 means len>1.  */
11977 switch (compare_tree_int (len, 1))
11979 case -1: /* length is 0, delete the call entirely . */
/* NOTE(review): stray double semicolon at the end of the next line --
   harmless but worth cleaning up in the full source.  */
11980 return omit_one_operand_loc (loc, integer_type_node,
11981 integer_zero_node, arg1);;
11983 case 0: /* length is 1, call fputc. */
11985 const char *p = c_getstr (arg0);
11990 return build_call_expr_loc (loc, fn_fputc, 2,
11991 build_int_cst (NULL_TREE, p[0]), arg1);
11997 case 1: /* length is greater than 1, call fwrite. */
11999 /* If optimizing for size keep fputs. */
12000 if (optimize_function_for_size_p (cfun))
12002 /* New argument list transforming fputs(string, stream) to
12003 fwrite(string, 1, len, stream). */
12005 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12006 size_one_node, len, arg1);
/* compare_tree_int only returns -1/0/1, so anything else is a bug.  */
12011 gcc_unreachable ();
12016 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12017 produced. False otherwise. This is done so that we don't output the error
12018 or warning twice or three times. */
12021 fold_builtin_next_arg (tree exp, bool va_start_p)
12023 tree fntype = TREE_TYPE (current_function_decl);
12024 int nargs = call_expr_nargs (exp);
/* A function with no "..." in its prototype (or an unprototyped one)
   cannot legally use va_start.  */
12027 if (TYPE_ARG_TYPES (fntype) == 0
12028 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
12029 == void_type_node))
12031 error ("%<va_start%> used in function with fixed args");
12037 if (va_start_p && (nargs != 2))
12039 error ("wrong number of arguments to function %<va_start%>");
12042 arg = CALL_EXPR_ARG (exp, 1);
12044 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12045 when we checked the arguments and if needed issued a warning. */
12050 /* Evidently an out of date version of <stdarg.h>; can't validate
12051 va_start's second argument, but can still work as intended. */
12052 warning (0, "%<__builtin_next_arg%> called without an argument");
12055 else if (nargs > 1)
12057 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12060 arg = CALL_EXPR_ARG (exp, 0);
/* After into-SSA the parameter may appear as an SSA_NAME; look
   through it to the underlying variable.  */
12063 if (TREE_CODE (arg) == SSA_NAME)
12064 arg = SSA_NAME_VAR (arg);
12066 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12067 or __builtin_next_arg (0) the first time we see it, after checking
12068 the arguments and if needed issuing a warning. */
12069 if (!integer_zerop (arg))
12071 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12073 /* Strip off all nops for the sake of the comparison. This
12074 is not quite the same as STRIP_NOPS. It does more.
12075 We must also strip off INDIRECT_EXPR for C++ reference
12077 while (CONVERT_EXPR_P (arg)
12078 || TREE_CODE (arg) == INDIRECT_REF)
12079 arg = TREE_OPERAND (arg, 0);
12080 if (arg != last_parm)
12082 /* FIXME: Sometimes with the tree optimizers we can get the
12083 not the last argument even though the user used the last
12084 argument. We just warn and set the arg to be the last
12085 argument so that we will get wrong-code because of
12087 warning (0, "second parameter of %<va_start%> not last named argument");
12090 /* Undefined by C99 7.15.1.4p4 (va_start):
12091 "If the parameter parmN is declared with the register storage
12092 class, with a function or array type, or with a type that is
12093 not compatible with the type that results after application of
12094 the default argument promotions, the behavior is undefined."
12096 else if (DECL_REGISTER (arg))
12097 warning (0, "undefined behaviour when second parameter of "
12098 "%<va_start%> is declared with %<register%> storage");
12100 /* We want to verify the second parameter just once before the tree
12101 optimizers are run and then avoid keeping it in the tree,
12102 as otherwise we could warn even for correct code like:
12103 void foo (int i, ...)
12104 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
/* Replace the checked argument with 0 so later passes skip the check:
   index 1 for va_start (ap, parm), index 0 for next_arg (parm).  */
12106 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12108 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12114 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12115 ORIG may be null if this is a 2-argument call. We don't attempt to
12116 simplify calls with more than 3 arguments.
12118 Return NULL_TREE if no simplification was possible, otherwise return the
12119 simplified form of the call as a tree. If IGNORED is true, it means that
12120 the caller does not use the returned value of the function. */
12123 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12124 tree orig, int ignored)
12127 const char *fmt_str = NULL;
12129 /* Verify the required arguments in the original call. We deal with two
12130 types of sprintf() calls: 'sprintf (str, fmt)' and
12131 'sprintf (dest, "%s", orig)'. */
12132 if (!validate_arg (dest, POINTER_TYPE)
12133 || !validate_arg (fmt, POINTER_TYPE))
12135 if (orig && !validate_arg (orig, POINTER_TYPE))
12138 /* Check whether the format is a literal string constant. */
12139 fmt_str = c_getstr (fmt);
12140 if (fmt_str == NULL)
12144 retval = NULL_TREE;
/* Target character constants (target_percent etc.) must be set up
   before inspecting the format string.  */
12146 if (!init_target_chars ())
12149 /* If the format doesn't contain % args or %%, use strcpy. */
12150 if (strchr (fmt_str, target_percent) == NULL)
12152 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12157 /* Don't optimize sprintf (buf, "abc", ptr++). */
12161 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12162 'format' is known to contain no % formats. */
12163 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
/* sprintf returns the number of characters written, i.e. the format
   length when there are no conversions.  */
12165 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
12168 /* If the format is "%s", use strcpy if the result isn't used. */
12169 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12172 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12177 /* Don't crash on sprintf (str1, "%s"). */
12181 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
/* The return value (length of ORIG) is only usable when it folds to a
   compile-time constant.  */
12184 retval = c_strlen (orig, 1);
12185 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12188 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12191 if (call && retval)
/* Chain the strcpy call and the constant return value with a
   COMPOUND_EXPR, converted to sprintf's declared return type.  */
12193 retval = fold_convert_loc
12194 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
12196 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12202 /* Expand a call EXP to __builtin_object_size. */
12205 expand_builtin_object_size (tree exp)
12208 int object_size_type;
12209 tree fndecl = get_callee_fndecl (exp);
/* Malformed calls expand to a trap rather than silently producing a
   bogus size.  */
12211 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12213 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12215 expand_builtin_trap ();
12219 ost = CALL_EXPR_ARG (exp, 1);
/* The object-size type must be a constant 0..3, per the builtin's
   documented interface.  */
12222 if (TREE_CODE (ost) != INTEGER_CST
12223 || tree_int_cst_sgn (ost) < 0
12224 || compare_tree_int (ost, 3) > 0)
12226 error ("%Klast argument of %D is not integer constant between 0 and 3",
12228 expand_builtin_trap ();
12232 object_size_type = tree_low_cst (ost, 0);
/* Unknown size: types 0/1 report (size_t)-1, types 2/3 report 0.  */
12234 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12237 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12238 FCODE is the BUILT_IN_* to use.
12239 Return NULL_RTX if we failed; the caller should emit a normal call,
12240 otherwise try to get the result in TARGET, if convenient (and in
12241 mode MODE if that's convenient). */
12244 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12245 enum built_in_function fcode)
12247 tree dest, src, len, size;
/* The second argument is an int for memset_chk, a pointer otherwise.  */
12249 if (!validate_arglist (exp,
12251 fcode == BUILT_IN_MEMSET_CHK
12252 ? INTEGER_TYPE : POINTER_TYPE,
12253 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12256 dest = CALL_EXPR_ARG (exp, 0);
12257 src = CALL_EXPR_ARG (exp, 1);
12258 len = CALL_EXPR_ARG (exp, 2);
12259 size = CALL_EXPR_ARG (exp, 3);
/* SIZE must be a known constant to reason about overflow at all.  */
12261 if (! host_integerp (size, 1))
12264 if (host_integerp (len, 1) || integer_all_onesp (size))
/* A constant LEN larger than the known object SIZE is a guaranteed
   overflow; warn, and fall back to the checking call.  (size == -1
   means "unknown", which never warns.)  */
12268 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12270 warning_at (tree_nonartificial_location (exp),
12271 0, "%Kcall to %D will always overflow destination buffer",
12272 exp, get_callee_fndecl (exp));
12277 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12278 mem{cpy,pcpy,move,set} is available. */
12281 case BUILT_IN_MEMCPY_CHK:
12282 fn = built_in_decls[BUILT_IN_MEMCPY];
12284 case BUILT_IN_MEMPCPY_CHK:
12285 fn = built_in_decls[BUILT_IN_MEMPCPY];
12287 case BUILT_IN_MEMMOVE_CHK:
12288 fn = built_in_decls[BUILT_IN_MEMMOVE];
12290 case BUILT_IN_MEMSET_CHK:
12291 fn = built_in_decls[BUILT_IN_MEMSET];
/* Build the unchecked call, then unwrap any COMPOUND_EXPRs folding
   may have produced, expanding their LHS for side effects.  */
12300 fn = build_call_expr (fn, 3, dest, src, len);
12301 STRIP_TYPE_NOPS (fn);
12302 while (TREE_CODE (fn) == COMPOUND_EXPR)
12304 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12306 fn = TREE_OPERAND (fn, 1);
12308 if (TREE_CODE (fn) == CALL_EXPR)
12309 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12310 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12312 else if (fcode == BUILT_IN_MEMSET_CHK)
12316 unsigned int dest_align
12317 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12319 /* If DEST is not a pointer type, call the normal function. */
12320 if (dest_align == 0)
12323 /* If SRC and DEST are the same (and not volatile), do nothing. */
12324 if (operand_equal_p (src, dest, 0))
12328 if (fcode != BUILT_IN_MEMPCPY_CHK)
12330 /* Evaluate and ignore LEN in case it has side-effects. */
12331 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12332 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* mempcpy returns DEST + LEN rather than DEST.  */
12335 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12336 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12339 /* __memmove_chk special case. */
12340 if (fcode == BUILT_IN_MEMMOVE_CHK)
12342 unsigned int src_align
12343 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12345 if (src_align == 0)
12348 /* If src is categorized for a readonly section we can use
12349 normal __memcpy_chk. */
/* Read-only SRC cannot overlap a writable DEST, so the copy
   direction no longer matters.  */
12350 if (readonly_data_expr (src))
12352 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12355 fn = build_call_expr (fn, 4, dest, src, len, size);
12356 STRIP_TYPE_NOPS (fn);
12357 while (TREE_CODE (fn) == COMPOUND_EXPR)
12359 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12361 fn = TREE_OPERAND (fn, 1);
12363 if (TREE_CODE (fn) == CALL_EXPR)
12364 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12365 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12372 /* Emit warning if a buffer overflow is detected at compile time. */
12375 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12379 location_t loc = tree_nonartificial_location (exp);
/* Pick out the length-like and size arguments; their positions differ
   per builtin.  */
12383 case BUILT_IN_STRCPY_CHK:
12384 case BUILT_IN_STPCPY_CHK:
12385 /* For __strcat_chk the warning will be emitted only if overflowing
12386 by at least strlen (dest) + 1 bytes. */
12387 case BUILT_IN_STRCAT_CHK:
12388 len = CALL_EXPR_ARG (exp, 1);
12389 size = CALL_EXPR_ARG (exp, 2);
12392 case BUILT_IN_STRNCAT_CHK:
12393 case BUILT_IN_STRNCPY_CHK:
12394 len = CALL_EXPR_ARG (exp, 2);
12395 size = CALL_EXPR_ARG (exp, 3);
12397 case BUILT_IN_SNPRINTF_CHK:
12398 case BUILT_IN_VSNPRINTF_CHK:
12399 len = CALL_EXPR_ARG (exp, 1);
12400 size = CALL_EXPR_ARG (exp, 3);
12403 gcc_unreachable ();
/* SIZE == -1 means the object size is unknown: nothing to check.  */
12409 if (! host_integerp (size, 1) || integer_all_onesp (size))
/* For the str*cpy/strcat family LEN is a string; use its length.  */
12414 len = c_strlen (len, 1);
12415 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12418 else if (fcode == BUILT_IN_STRNCAT_CHK)
12420 tree src = CALL_EXPR_ARG (exp, 1);
12421 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12423 src = c_strlen (src, 1);
12424 if (! src || ! host_integerp (src, 1))
/* Source length unknown: overflow is possible but not certain.  */
12426 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12427 exp, get_callee_fndecl (exp));
12430 else if (tree_int_cst_lt (src, size))
12433 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12436 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12437 exp, get_callee_fndecl (exp));
12440 /* Emit warning if a buffer overflow is detected at compile time
12441 in __sprintf_chk/__vsprintf_chk calls. */
12444 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12446 tree dest, size, len, fmt, flag;
12447 const char *fmt_str;
12448 int nargs = call_expr_nargs (exp);
12450 /* Verify the required arguments in the original call. */
/* __sprintf_chk (dest, flag, size, fmt, ...).  */
12454 dest = CALL_EXPR_ARG (exp, 0);
12455 flag = CALL_EXPR_ARG (exp, 1);
12456 size = CALL_EXPR_ARG (exp, 2);
12457 fmt = CALL_EXPR_ARG (exp, 3);
/* SIZE == -1 means the destination size is unknown: nothing to do.  */
12459 if (! host_integerp (size, 1) || integer_all_onesp (size))
12462 /* Check whether the format is a literal string constant. */
12463 fmt_str = c_getstr (fmt);
12464 if (fmt_str == NULL)
12467 if (!init_target_chars ())
12470 /* If the format doesn't contain % args or %%, we know its size. */
12471 if (strchr (fmt_str, target_percent) == 0)
12472 len = build_int_cstu (size_type_node, strlen (fmt_str));
12473 /* If the format is "%s" and first ... argument is a string literal,
12475 else if (fcode == BUILT_IN_SPRINTF_CHK
12476 && strcmp (fmt_str, target_percent_s) == 0)
/* The first variadic argument sits at index 4 after dest/flag/size/fmt.  */
12482 arg = CALL_EXPR_ARG (exp, 4);
12483 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12486 len = c_strlen (arg, 1);
12487 if (!len || ! host_integerp (len, 1))
/* LEN counts characters without the NUL, so LEN >= SIZE overflows.  */
12493 if (! tree_int_cst_lt (len, size))
12494 warning_at (tree_nonartificial_location (exp),
12495 0, "%Kcall to %D will always overflow destination buffer",
12496 exp, get_callee_fndecl (exp));
12499 /* Emit warning if a free is called with address of a variable. */
12502 maybe_emit_free_warning (tree exp)
12504 tree arg = CALL_EXPR_ARG (exp, 0);
/* Only &object arguments can be diagnosed statically.  */
12507 if (TREE_CODE (arg) != ADDR_EXPR)
12510 arg = get_base_address (TREE_OPERAND (arg, 0));
12511 if (arg == NULL || INDIRECT_REF_P (arg))
/* Name the offending variable in the diagnostic when we have a decl.  */
12514 if (SSA_VAR_P (arg))
12515 warning_at (tree_nonartificial_location (exp),
12516 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12518 warning_at (tree_nonartificial_location (exp),
12519 0, "%Kattempt to free a non-heap object", exp);
12522 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12526 fold_builtin_object_size (tree ptr, tree ost)
12528 tree ret = NULL_TREE;
12529 int object_size_type;
12531 if (!validate_arg (ptr, POINTER_TYPE)
12532 || !validate_arg (ost, INTEGER_TYPE))
/* OST must be a constant in 0..3; otherwise leave the call alone.  */
12537 if (TREE_CODE (ost) != INTEGER_CST
12538 || tree_int_cst_sgn (ost) < 0
12539 || compare_tree_int (ost, 3) > 0)
12542 object_size_type = tree_low_cst (ost, 0);
12544 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12545 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12546 and (size_t) 0 for types 2 and 3. */
12547 if (TREE_SIDE_EFFECTS (ptr))
12548 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12550 if (TREE_CODE (ptr) == ADDR_EXPR)
12551 ret = build_int_cstu (size_type_node,
12552 compute_builtin_object_size (ptr, object_size_type));
12554 else if (TREE_CODE (ptr) == SSA_NAME)
12556 unsigned HOST_WIDE_INT bytes;
12558 /* If object size is not known yet, delay folding until
12559 later. Maybe subsequent passes will help determining
12561 bytes = compute_builtin_object_size (ptr, object_size_type);
/* Only fold when the computed size differs from the "unknown"
   sentinel for this size type.  */
12562 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12564 ret = build_int_cstu (size_type_node, bytes);
/* Verify the result actually fits in size_t before returning it.  */
12569 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12570 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12571 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12578 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12579 DEST, SRC, LEN, and SIZE are the arguments to the call.
12580 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12581 code of the builtin. If MAXLEN is not NULL, it is maximum length
12582 passed as third argument. */
12585 fold_builtin_memory_chk (location_t loc, tree fndecl,
12586 tree dest, tree src, tree len, tree size,
12587 tree maxlen, bool ignore,
12588 enum built_in_function fcode)
/* SRC is an int for memset_chk, a pointer for the copy variants.  */
12592 if (!validate_arg (dest, POINTER_TYPE)
12593 || !validate_arg (src,
12594 (fcode == BUILT_IN_MEMSET_CHK
12595 ? INTEGER_TYPE : POINTER_TYPE))
12596 || !validate_arg (len, INTEGER_TYPE)
12597 || !validate_arg (size, INTEGER_TYPE))
12600 /* If SRC and DEST are the same (and not volatile), return DEST
12601 (resp. DEST+LEN for __mempcpy_chk). */
12602 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12604 if (fcode != BUILT_IN_MEMPCPY_CHK)
12605 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12609 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12611 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
/* SIZE must be a known constant to justify dropping the check.  */
12615 if (! host_integerp (size, 1))
/* SIZE == -1 ("unknown") skips straight to the unchecked call.  */
12618 if (! integer_all_onesp (size))
12620 if (! host_integerp (len, 1))
12622 /* If LEN is not constant, try MAXLEN too.
12623 For MAXLEN only allow optimizing into non-_ocs function
12624 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12625 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12627 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12629 /* (void) __mempcpy_chk () can be optimized into
12630 (void) __memcpy_chk (). */
12631 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12635 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
/* Dropping the check is only safe when SIZE >= the (max) length.  */
12643 if (tree_int_cst_lt (size, maxlen))
12648 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12649 mem{cpy,pcpy,move,set} is available. */
12652 case BUILT_IN_MEMCPY_CHK:
12653 fn = built_in_decls[BUILT_IN_MEMCPY];
12655 case BUILT_IN_MEMPCPY_CHK:
12656 fn = built_in_decls[BUILT_IN_MEMPCPY];
12658 case BUILT_IN_MEMMOVE_CHK:
12659 fn = built_in_decls[BUILT_IN_MEMMOVE];
12661 case BUILT_IN_MEMSET_CHK:
12662 fn = built_in_decls[BUILT_IN_MEMSET];
12671 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12674 /* Fold a call to the __st[rp]cpy_chk builtin.
12675 DEST, SRC, and SIZE are the arguments to the call.
12676 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12677 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12678 strings passed as second argument. */
12681 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12682 tree src, tree size,
12683 tree maxlen, bool ignore,
12684 enum built_in_function fcode)
12688 if (!validate_arg (dest, POINTER_TYPE)
12689 || !validate_arg (src, POINTER_TYPE)
12690 || !validate_arg (size, INTEGER_TYPE))
12693 /* If SRC and DEST are the same (and not volatile), return DEST. */
/* Only valid for strcpy; stpcpy would need DEST + strlen (DEST).  */
12694 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12695 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12697 if (! host_integerp (size, 1))
/* SIZE == -1 ("unknown") goes straight to the unchecked call below.  */
12700 if (! integer_all_onesp (size))
12702 len = c_strlen (src, 1);
12703 if (! len || ! host_integerp (len, 1))
12705 /* If LEN is not constant, try MAXLEN too.
12706 For MAXLEN only allow optimizing into non-_ocs function
12707 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12708 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12710 if (fcode == BUILT_IN_STPCPY_CHK)
12715 /* If return value of __stpcpy_chk is ignored,
12716 optimize into __strcpy_chk. */
12717 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12721 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12724 if (! len || TREE_SIDE_EFFECTS (len))
12727 /* If c_strlen returned something, but not a constant,
12728 transform __strcpy_chk into __memcpy_chk. */
12729 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
/* Copy the terminating NUL too, hence LEN + 1.  */
12733 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12734 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12735 build_call_expr_loc (loc, fn, 4,
12736 dest, src, len, size));
/* The check may only be dropped when MAXLEN < SIZE.  */
12742 if (! tree_int_cst_lt (maxlen, size))
12746 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12747 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12748 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12752 return build_call_expr_loc (loc, fn, 2, dest, src);
/* NOTE(review): elided listing (line-number gaps); the `return NULL_TREE;`
   fall-throughs after each failed check are missing from this view.  */
12755 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12756 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12757 length passed as third argument. */
12760 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12761 tree len, tree size, tree maxlen)
12765 if (!validate_arg (dest, POINTER_TYPE)
12766 || !validate_arg (src, POINTER_TYPE)
12767 || !validate_arg (len, INTEGER_TYPE)
12768 || !validate_arg (size, INTEGER_TYPE))
12771 if (! host_integerp (size, 1))
/* An all-ones SIZE means "unknown object size"; only then is the checked
   call reduced to plain strncpy below.  */
12774 if (! integer_all_onesp (size))
12776 if (! host_integerp (len, 1))
12778 /* If LEN is not constant, try MAXLEN too.
12779 For MAXLEN only allow optimizing into non-_ocs function
12780 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12781 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12787 if (tree_int_cst_lt (size, maxlen))
12791 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12792 fn = built_in_decls[BUILT_IN_STRNCPY];
12796 return build_call_expr_loc (loc, fn, 3, dest, src, len);
/* NOTE(review): elided listing (line-number gaps); failure returns and
   local declarations between visible lines are missing from this view.  */
12799 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12800 are the arguments to the call. */
12803 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12804 tree src, tree size)
12809 if (!validate_arg (dest, POINTER_TYPE)
12810 || !validate_arg (src, POINTER_TYPE)
12811 || !validate_arg (size, INTEGER_TYPE))
12814 p = c_getstr (src);
12815 /* If the SRC parameter is "", return DEST. */
12816 if (p && *p == '\0')
12817 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Only fold to plain strcat when SIZE is the all-ones "unknown size"
   sentinel; a known object size must keep the checking call.  */
12819 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12822 /* If __builtin_strcat_chk is used, assume strcat is available. */
12823 fn = built_in_decls[BUILT_IN_STRCAT];
12827 return build_call_expr_loc (loc, fn, 2, dest, src);
12830 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12834 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12835 tree dest, tree src, tree len, tree size)
12840 if (!validate_arg (dest, POINTER_TYPE)
12841 || !validate_arg (src, POINTER_TYPE)
12842 || !validate_arg (size, INTEGER_TYPE)
12843 || !validate_arg (size, INTEGER_TYPE))
12846 p = c_getstr (src);
12847 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12848 if (p && *p == '\0')
12849 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12850 else if (integer_zerop (len))
12851 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12853 if (! host_integerp (size, 1))
12856 if (! integer_all_onesp (size))
12858 tree src_len = c_strlen (src, 1);
12860 && host_integerp (src_len, 1)
12861 && host_integerp (len, 1)
12862 && ! tree_int_cst_lt (len, src_len))
12864 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12865 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12869 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12874 /* If __builtin_strncat_chk is used, assume strncat is available. */
12875 fn = built_in_decls[BUILT_IN_STRNCAT];
12879 return build_call_expr_loc (loc, fn, 3, dest, src, len);
/* NOTE(review): elided listing (line-number gaps); failure returns,
   braces and some declarations between visible lines are missing.  */
12882 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12883 a normal call should be emitted rather than expanding the function
12884 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12887 fold_builtin_sprintf_chk (location_t loc, tree exp,
12888 enum built_in_function fcode)
12890 tree dest, size, len, fn, fmt, flag;
12891 const char *fmt_str;
12892 int nargs = call_expr_nargs (exp);
12894 /* Verify the required arguments in the original call. */
12897 dest = CALL_EXPR_ARG (exp, 0);
12898 if (!validate_arg (dest, POINTER_TYPE))
12900 flag = CALL_EXPR_ARG (exp, 1);
12901 if (!validate_arg (flag, INTEGER_TYPE))
12903 size = CALL_EXPR_ARG (exp, 2);
12904 if (!validate_arg (size, INTEGER_TYPE))
12906 fmt = CALL_EXPR_ARG (exp, 3);
12907 if (!validate_arg (fmt, POINTER_TYPE))
12910 if (! host_integerp (size, 1))
/* Target charset globals (target_percent etc.) must be populated before
   comparing against the format string.  */
12915 if (!init_target_chars ())
12918 /* Check whether the format is a literal string constant. */
12919 fmt_str = c_getstr (fmt);
12920 if (fmt_str != NULL)
12922 /* If the format doesn't contain % args or %%, we know the size. */
12923 if (strchr (fmt_str, target_percent) == 0)
12925 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12926 len = build_int_cstu (size_type_node, strlen (fmt_str));
12928 /* If the format is "%s" and first ... argument is a string literal,
12929 we know the size too. */
12930 else if (fcode == BUILT_IN_SPRINTF_CHK
12931 && strcmp (fmt_str, target_percent_s) == 0)
12937 arg = CALL_EXPR_ARG (exp, 4);
12938 if (validate_arg (arg, POINTER_TYPE))
12940 len = c_strlen (arg, 1);
12941 if (! len || ! host_integerp (len, 1))
12948 if (! integer_all_onesp (size))
12950 if (! len || ! tree_int_cst_lt (len, size))
12954 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12955 or if format doesn't contain % chars or is "%s". */
12956 if (! integer_zerop (flag))
12958 if (fmt_str == NULL)
12960 if (strchr (fmt_str, target_percent) != NULL
12961 && strcmp (fmt_str, target_percent_s))
12965 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12966 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12967 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Drop the FLAG and SIZE arguments: keep DEST and FMT (plus trailing
   varargs) when rewriting to the unchecked function.  */
12971 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
/* NOTE(review): elided listing (line-number gaps); failure returns and
   braces between visible lines are missing from this view.  */
12974 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12975 a normal call should be emitted rather than expanding the function
12976 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12977 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12978 passed as second argument. */
12981 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12982 enum built_in_function fcode)
12984 tree dest, size, len, fn, fmt, flag;
12985 const char *fmt_str;
12987 /* Verify the required arguments in the original call. */
12988 if (call_expr_nargs (exp) < 5)
12990 dest = CALL_EXPR_ARG (exp, 0);
12991 if (!validate_arg (dest, POINTER_TYPE))
12993 len = CALL_EXPR_ARG (exp, 1);
12994 if (!validate_arg (len, INTEGER_TYPE))
12996 flag = CALL_EXPR_ARG (exp, 2);
12997 if (!validate_arg (flag, INTEGER_TYPE))
12999 size = CALL_EXPR_ARG (exp, 3);
13000 if (!validate_arg (size, INTEGER_TYPE))
13002 fmt = CALL_EXPR_ARG (exp, 4);
13003 if (!validate_arg (fmt, POINTER_TYPE))
13006 if (! host_integerp (size, 1))
13009 if (! integer_all_onesp (size))
13011 if (! host_integerp (len, 1))
13013 /* If LEN is not constant, try MAXLEN too.
13014 For MAXLEN only allow optimizing into non-_ocs function
13015 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13016 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13022 if (tree_int_cst_lt (size, maxlen))
13026 if (!init_target_chars ())
13029 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13030 or if format doesn't contain % chars or is "%s". */
13031 if (! integer_zerop (flag))
13033 fmt_str = c_getstr (fmt);
13034 if (fmt_str == NULL)
13036 if (strchr (fmt_str, target_percent) != NULL
13037 && strcmp (fmt_str, target_percent_s))
13041 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13043 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13044 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Drop FLAG and SIZE: keep DEST, LEN and FMT (plus varargs) in the
   rewritten unchecked call.  */
13048 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
/* NOTE(review): elided listing (line-number gaps); early returns, braces
   and some declarations (e.g. `const char *str;`) are missing from view.  */
13051 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13052 FMT and ARG are the arguments to the call; we don't fold cases with
13053 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13055 Return NULL_TREE if no simplification was possible, otherwise return the
13056 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13057 code of the function to be simplified. */
13060 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13061 tree arg, bool ignore,
13062 enum built_in_function fcode)
13064 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13065 const char *fmt_str = NULL;
13067 /* If the return value is used, don't do the transformation. */
13071 /* Verify the required arguments in the original call. */
13072 if (!validate_arg (fmt, POINTER_TYPE))
13075 /* Check whether the format is a literal string constant. */
13076 fmt_str = c_getstr (fmt);
13077 if (fmt_str == NULL)
13080 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13082 /* If we're using an unlocked function, assume the other
13083 unlocked functions exist explicitly. */
13084 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
13085 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
/* Otherwise use the "implicit" decls, which may be NULL_TREE when the
   runtime is not required to provide putchar/puts.  */
13089 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
13090 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
13093 if (!init_target_chars ())
13096 if (strcmp (fmt_str, target_percent_s) == 0
13097 || strchr (fmt_str, target_percent) == NULL)
13101 if (strcmp (fmt_str, target_percent_s) == 0)
13103 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13106 if (!arg || !validate_arg (arg, POINTER_TYPE))
13109 str = c_getstr (arg);
13115 /* The format specifier doesn't contain any '%' characters. */
13116 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13122 /* If the string was "", printf does nothing. */
13123 if (str[0] == '\0')
13124 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13126 /* If the string has length of 1, call putchar. */
13127 if (str[1] == '\0')
13129 /* Given printf("c"), (where c is any one character,)
13130 convert "c"[0] to an int and pass that to the replacement
13132 newarg = build_int_cst (NULL_TREE, str[0]);
13134 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13138 /* If the string was "string\n", call puts("string"). */
13139 size_t len = strlen (str);
13140 if ((unsigned char)str[len - 1] == target_newline)
13142 /* Create a NUL-terminated string that's one char shorter
13143 than the original, stripping off the trailing '\n'. */
13144 char *newstr = XALLOCAVEC (char, len);
13145 memcpy (newstr, str, len - 1);
13146 newstr[len - 1] = 0;
13148 newarg = build_string_literal (len, newstr);
13150 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13153 /* We'd like to arrange to call fputs(string,stdout) here,
13154 but we need stdout and don't have a way to get it yet. */
13159 /* The other optimizations can be done only on the non-va_list variants. */
13160 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13163 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13164 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13166 if (!arg || !validate_arg (arg, POINTER_TYPE))
13169 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13172 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13173 else if (strcmp (fmt_str, target_percent_c) == 0)
13175 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13178 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13184 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
/* NOTE(review): elided listing (line-number gaps); early returns and
   braces between visible lines are missing from this view.  */
13187 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13188 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13189 more than 3 arguments, and ARG may be null in the 2-argument case.
13191 Return NULL_TREE if no simplification was possible, otherwise return the
13192 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13193 code of the function to be simplified. */
13196 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13197 tree fmt, tree arg, bool ignore,
13198 enum built_in_function fcode)
13200 tree fn_fputc, fn_fputs, call = NULL_TREE;
13201 const char *fmt_str = NULL;
13203 /* If the return value is used, don't do the transformation. */
13207 /* Verify the required arguments in the original call. */
13208 if (!validate_arg (fp, POINTER_TYPE))
13210 if (!validate_arg (fmt, POINTER_TYPE))
13213 /* Check whether the format is a literal string constant. */
13214 fmt_str = c_getstr (fmt);
13215 if (fmt_str == NULL)
13218 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13220 /* If we're using an unlocked function, assume the other
13221 unlocked functions exist explicitly. */
13222 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
13223 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
/* Otherwise use the implicit decls, which may be NULL_TREE when the
   runtime need not provide fputc/fputs.  */
13227 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
13228 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
13231 if (!init_target_chars ())
13234 /* If the format doesn't contain % args or %%, use strcpy. */
13235 if (strchr (fmt_str, target_percent) == NULL)
13237 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13241 /* If the format specifier was "", fprintf does nothing. */
13242 if (fmt_str[0] == '\0')
13244 /* If FP has side-effects, just wait until gimplification is
13246 if (TREE_SIDE_EFFECTS (fp))
13249 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13252 /* When "string" doesn't contain %, replace all cases of
13253 fprintf (fp, string) with fputs (string, fp). The fputs
13254 builtin will take care of special cases like length == 1. */
13256 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13259 /* The other optimizations can be done only on the non-va_list variants. */
13260 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13263 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13264 else if (strcmp (fmt_str, target_percent_s) == 0)
13266 if (!arg || !validate_arg (arg, POINTER_TYPE))
13269 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13272 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13273 else if (strcmp (fmt_str, target_percent_c) == 0)
13275 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13278 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13283 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
/* NOTE(review): elided listing; the `static bool init;` guard, the
   failure/`return true` paths and closing brace are missing from view.  */
13286 /* Initialize format string characters in the target charset. */
13289 init_target_chars (void)
/* Translate the host characters used by the printf/scanf folders into
   the target execution character set via the language hook.  */
13294 target_newline = lang_hooks.to_target_charset ('\n');
13295 target_percent = lang_hooks.to_target_charset ('%');
13296 target_c = lang_hooks.to_target_charset ('c');
13297 target_s = lang_hooks.to_target_charset ('s');
/* A zero from to_target_charset means the character has no target
   representation; the (elided) branch bails out in that case.  */
13298 if (target_newline == 0 || target_percent == 0 || target_c == 0
13302 target_percent_c[0] = target_percent;
13303 target_percent_c[1] = target_c;
13304 target_percent_c[2] = '\0';
13306 target_percent_s[0] = target_percent;
13307 target_percent_s[1] = target_s;
13308 target_percent_s[2] = '\0';
13310 target_percent_s_newline[0] = target_percent;
13311 target_percent_s_newline[1] = target_s;
13312 target_percent_s_newline[2] = target_newline;
13313 target_percent_s_newline[3] = '\0';
/* NOTE(review): elided listing; the closing braces and final
   `return NULL_TREE;` are missing from this view.  */
13320 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13321 and no overflow/underflow occurred. INEXACT is true if M was not
13322 exactly calculated. TYPE is the tree type for the result. This
13323 function assumes that you cleared the MPFR flags and then
13324 calculated M to see if anything subsequently set a flag prior to
13325 entering this function. Return NULL_TREE if any checks fail. */
13328 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13330 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13331 overflow/underflow occurred. If -frounding-math, proceed iff the
13332 result of calling FUNC was exact. */
13333 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13334 && (!flag_rounding_math || !inexact))
13336 REAL_VALUE_TYPE rr;
13338 real_from_mpfr (&rr, m, type, GMP_RNDN);
13339 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13340 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13341 but the mpft_t is not, then we underflowed in the
13343 if (real_isfinite (&rr)
13344 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13346 REAL_VALUE_TYPE rmode;
13348 real_convert (&rmode, TYPE_MODE (type), &rr);
13349 /* Proceed iff the specified mode can hold the value. */
13350 if (real_identical (&rmode, &rr))
13351 return build_real (type, rmode);
/* NOTE(review): elided listing; the `force_convert ||` prefixes of the
   visible `||` conditions, closing braces and final return are missing.
   The trailing #endif closes an (unseen) HAVE_mpc conditional.  */
13358 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13359 number and no overflow/underflow occurred. INEXACT is true if M
13360 was not exactly calculated. TYPE is the tree type for the result.
13361 This function assumes that you cleared the MPFR flags and then
13362 calculated M to see if anything subsequently set a flag prior to
13363 entering this function. Return NULL_TREE if any checks fail, if
13364 FORCE_CONVERT is true, then bypass the checks. */
13367 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13369 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13370 overflow/underflow occurred. If -frounding-math, proceed iff the
13371 result of calling FUNC was exact. */
13373 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13374 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13375 && (!flag_rounding_math || !inexact)))
13377 REAL_VALUE_TYPE re, im;
13379 real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
13380 real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
13381 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13382 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13383 but the mpft_t is not, then we underflowed in the
13386 || (real_isfinite (&re) && real_isfinite (&im)
13387 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13388 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13390 REAL_VALUE_TYPE re_mode, im_mode;
/* TYPE is a COMPLEX_TYPE; its element type gives the scalar mode used
   for both the real and imaginary conversions.  */
13392 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13393 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13394 /* Proceed iff the specified mode can hold the value. */
13396 || (real_identical (&re_mode, &re)
13397 && real_identical (&im_mode, &im)))
13398 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13399 build_real (TREE_TYPE (type), im_mode));
13404 #endif /* HAVE_mpc */
/* NOTE(review): elided listing; the mpfr_t declaration, mpfr_clear,
   closing braces and `return result;` are missing from this view.  */
13406 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13407 FUNC on it and return the resulting value as a tree with type TYPE.
13408 If MIN and/or MAX are not NULL, then the supplied ARG must be
13409 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13410 acceptable values, otherwise they are not. The mpfr precision is
13411 set to the precision of TYPE. We assume that function FUNC returns
13412 zero if the result could be calculated exactly within the requested
13416 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13417 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13420 tree result = NULL_TREE;
13424 /* To proceed, MPFR must exactly represent the target floating point
13425 format, which only happens when the target base equals two. */
13426 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13427 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13429 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13431 if (real_isfinite (ra)
13432 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13433 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13435 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13436 const int prec = fmt->p;
13437 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Clear MPFR's global flags before FUNC so do_mpfr_ckconv can detect
   overflow/underflow raised by this one computation.  */
13441 mpfr_init2 (m, prec);
13442 mpfr_from_real (m, ra, GMP_RNDN);
13443 mpfr_clear_flags ();
13444 inexact = func (m, m, rnd);
13445 result = do_mpfr_ckconv (m, type, inexact);
/* NOTE(review): elided listing; mpfr_t declarations, closing braces and
   `return result;` are missing from this view.  */
13453 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13454 FUNC on it and return the resulting value as a tree with type TYPE.
13455 The mpfr precision is set to the precision of TYPE. We assume that
13456 function FUNC returns zero if the result could be calculated
13457 exactly within the requested precision. */
13460 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13461 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13463 tree result = NULL_TREE;
13468 /* To proceed, MPFR must exactly represent the target floating point
13469 format, which only happens when the target base equals two. */
13470 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13471 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13472 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13474 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13475 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13477 if (real_isfinite (ra1) && real_isfinite (ra2))
13479 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13480 const int prec = fmt->p;
13481 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Flags are cleared just before FUNC so ckconv sees only this call's
   overflow/underflow state; m1 doubles as the result operand.  */
13485 mpfr_inits2 (prec, m1, m2, NULL);
13486 mpfr_from_real (m1, ra1, GMP_RNDN);
13487 mpfr_from_real (m2, ra2, GMP_RNDN);
13488 mpfr_clear_flags ();
13489 inexact = func (m1, m1, m2, rnd);
13490 result = do_mpfr_ckconv (m1, type, inexact);
13491 mpfr_clears (m1, m2, NULL);
/* NOTE(review): elided listing; mpfr_t declarations, closing braces and
   `return result;` are missing from this view.  */
13498 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13499 FUNC on it and return the resulting value as a tree with type TYPE.
13500 The mpfr precision is set to the precision of TYPE. We assume that
13501 function FUNC returns zero if the result could be calculated
13502 exactly within the requested precision. */
13505 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13506 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13508 tree result = NULL_TREE;
13514 /* To proceed, MPFR must exactly represent the target floating point
13515 format, which only happens when the target base equals two. */
13516 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13517 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13518 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13519 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13521 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13522 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13523 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13525 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13527 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13528 const int prec = fmt->p;
13529 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13533 mpfr_inits2 (prec, m1, m2, m3, NULL);
13534 mpfr_from_real (m1, ra1, GMP_RNDN);
13535 mpfr_from_real (m2, ra2, GMP_RNDN);
13536 mpfr_from_real (m3, ra3, GMP_RNDN);
13537 mpfr_clear_flags ();
13538 inexact = func (m1, m1, m2, m3, rnd);
13539 result = do_mpfr_ckconv (m1, type, inexact);
13540 mpfr_clears (m1, m2, m3, NULL);
/* NOTE(review): elided listing; mpfr_t declarations, build_real calls on
   the MODIFY_EXPR right-hand sides, closing braces and `return result;`
   are missing from this view.  */
13547 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13548 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13549 If ARG_SINP and ARG_COSP are NULL then the result is returned
13550 as a complex value.
13551 The type is taken from the type of ARG and is used for setting the
13552 precision of the calculation and results. */
13555 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13557 tree const type = TREE_TYPE (arg);
13558 tree result = NULL_TREE;
13562 /* To proceed, MPFR must exactly represent the target floating point
13563 format, which only happens when the target base equals two. */
13564 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13565 && TREE_CODE (arg) == REAL_CST
13566 && !TREE_OVERFLOW (arg))
13568 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13570 if (real_isfinite (ra))
13572 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13573 const int prec = fmt->p;
13574 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13575 tree result_s, result_c;
/* mpfr_sin_cos computes both results from one argument; its inexact
   flag covers both, so it gates both conversions below.  */
13579 mpfr_inits2 (prec, m, ms, mc, NULL);
13580 mpfr_from_real (m, ra, GMP_RNDN);
13581 mpfr_clear_flags ();
13582 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13583 result_s = do_mpfr_ckconv (ms, type, inexact);
13584 result_c = do_mpfr_ckconv (mc, type, inexact);
13585 mpfr_clears (m, ms, mc, NULL);
13586 if (result_s && result_c)
13588 /* If we are to return in a complex value do so. */
13589 if (!arg_sinp && !arg_cosp)
13590 return build_complex (build_complex_type (type),
13591 result_c, result_s);
13593 /* Dereference the sin/cos pointer arguments. */
13594 arg_sinp = build_fold_indirect_ref (arg_sinp);
13595 arg_cosp = build_fold_indirect_ref (arg_cosp);
13596 /* Proceed if valid pointer type were passed in. */
13597 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13598 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13600 /* Set the values. */
13601 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13603 TREE_SIDE_EFFECTS (result_s) = 1;
13604 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13606 TREE_SIDE_EFFECTS (result_c) = 1;
13607 /* Combine the assignments into a compound expr. */
13608 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13609 result_s, result_c));
/* NOTE(review): elided listing; the function's return type, the `if (n
   ...` condition head at 13642, closing braces and `return result;` are
   missing from this view.  */
13617 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13618 two-argument mpfr order N Bessel function FUNC on them and return
13619 the resulting value as a tree with type TYPE. The mpfr precision
13620 is set to the precision of TYPE. We assume that function FUNC
13621 returns zero if the result could be calculated exactly within the
13622 requested precision. */
13624 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13625 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13626 const REAL_VALUE_TYPE *min, bool inclusive)
13628 tree result = NULL_TREE;
13633 /* To proceed, MPFR must exactly represent the target floating point
13634 format, which only happens when the target base equals two. */
13635 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13636 && host_integerp (arg1, 0)
13637 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13639 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13640 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13643 && real_isfinite (ra)
13644 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13646 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13647 const int prec = fmt->p;
13648 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13652 mpfr_init2 (m, prec);
13653 mpfr_from_real (m, ra, GMP_RNDN);
13654 mpfr_clear_flags ();
/* FUNC is mpfr_jn/mpfr_yn-style: order N plus one real operand.  */
13655 inexact = func (m, n, m, rnd);
13656 result = do_mpfr_ckconv (m, type, inexact);
/* NOTE(review): elided listing; mpfr_t/long declarations, the
   `if (result_rem)` guard, closing braces and `return result;` are
   missing from this view.  */
13664 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13665 the pointer *(ARG_QUO) and return the result. The type is taken
13666 from the type of ARG0 and is used for setting the precision of the
13667 calculation and results. */
13670 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13672 tree const type = TREE_TYPE (arg0);
13673 tree result = NULL_TREE;
13678 /* To proceed, MPFR must exactly represent the target floating point
13679 format, which only happens when the target base equals two. */
13680 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13681 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13682 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13684 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13685 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13687 if (real_isfinite (ra0) && real_isfinite (ra1))
13689 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13690 const int prec = fmt->p;
13691 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13696 mpfr_inits2 (prec, m0, m1, NULL);
13697 mpfr_from_real (m0, ra0, GMP_RNDN);
13698 mpfr_from_real (m1, ra1, GMP_RNDN);
13699 mpfr_clear_flags ();
13700 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13701 /* Remquo is independent of the rounding mode, so pass
13702 inexact=0 to do_mpfr_ckconv(). */
13703 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13704 mpfr_clears (m0, m1, NULL);
13707 /* MPFR calculates quo in the host's long so it may
13708 return more bits in quo than the target int can hold
13709 if sizeof(host long) > sizeof(target int). This can
13710 happen even for native compilers in LP64 mode. In
13711 these cases, modulo the quo value with the largest
13712 number that the target int can hold while leaving one
13713 bit for the sign. */
13714 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13715 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13717 /* Dereference the quo pointer argument. */
13718 arg_quo = build_fold_indirect_ref (arg_quo);
13719 /* Proceed iff a valid pointer type was passed in. */
13720 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13722 /* Set the value. */
13723 tree result_quo = fold_build2 (MODIFY_EXPR,
13724 TREE_TYPE (arg_quo), arg_quo,
13725 build_int_cst (NULL, integer_quo));
13726 TREE_SIDE_EFFECTS (result_quo) = 1;
13727 /* Combine the quo assignment with the rem. */
13728 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13729 result_quo, result_rem));
/* NOTE(review): elided listing; the `int sg;` declaration, the
   `if (result_lg)` guard, closing braces and `return result;` are
   missing from this view.  */
13737 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13738 resulting value as a tree with type TYPE. The mpfr precision is
13739 set to the precision of TYPE. We assume that this mpfr function
13740 returns zero if the result could be calculated exactly within the
13741 requested precision. In addition, the integer pointer represented
13742 by ARG_SG will be dereferenced and set to the appropriate signgam
13746 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13748 tree result = NULL_TREE;
13752 /* To proceed, MPFR must exactly represent the target floating point
13753 format, which only happens when the target base equals two. Also
13754 verify ARG is a constant and that ARG_SG is an int pointer. */
13755 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13756 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13757 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13758 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13760 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13762 /* In addition to NaN and Inf, the argument cannot be zero or a
13763 negative integer. */
13764 if (real_isfinite (ra)
13765 && ra->cl != rvc_zero
13766 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13768 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13769 const int prec = fmt->p;
13770 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13775 mpfr_init2 (m, prec);
13776 mpfr_from_real (m, ra, GMP_RNDN);
13777 mpfr_clear_flags ();
/* mpfr_lgamma reports the sign of gamma(x) through SG, mirroring the
   C library's signgam convention.  */
13778 inexact = mpfr_lgamma (m, &sg, m, rnd);
13779 result_lg = do_mpfr_ckconv (m, type, inexact);
13785 /* Dereference the arg_sg pointer argument. */
13786 arg_sg = build_fold_indirect_ref (arg_sg);
13787 /* Assign the signgam value into *arg_sg. */
13788 result_sg = fold_build2 (MODIFY_EXPR,
13789 TREE_TYPE (arg_sg), arg_sg,
13790 build_int_cst (NULL, sg));
13791 TREE_SIDE_EFFECTS (result_sg) = 1;
13792 /* Combine the signgam assignment with the lgamma result. */
13793 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13794 result_sg, result_lg));
13803 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13804 function FUNC on it and return the resulting value as a tree with
13805 type TYPE. The mpfr precision is set to the precision of TYPE. We
13806 assume that function FUNC returns zero if the result could be
13807 calculated exactly within the requested precision. */
13810 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13812 tree result = NULL_TREE;
13816 /* To proceed, MPFR must exactly represent the target floating point
13817 format, which only happens when the target base equals two. */
13818 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13819 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13820 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13822 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13823 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13825 if (real_isfinite (re) && real_isfinite (im))
13827 const struct real_format *const fmt =
13828 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13829 const int prec = fmt->p;
13830 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13831 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13835 mpc_init2 (m, prec);
13836 mpfr_from_real (mpc_realref(m), re, rnd);
13837 mpfr_from_real (mpc_imagref(m), im, rnd);
13838 mpfr_clear_flags ();
13839 inexact = func (m, m, crnd);
13840 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13848 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13849 mpc function FUNC on it and return the resulting value as a tree
13850 with type TYPE. The mpfr precision is set to the precision of
13851 TYPE. We assume that function FUNC returns zero if the result
13852 could be calculated exactly within the requested precision. If
13853 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13854 in the arguments and/or results. */
13858 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13859 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13861 tree result = NULL_TREE;
13866 /* To proceed, MPFR must exactly represent the target floating point
13867 format, which only happens when the target base equals two. */
13868 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13869 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13870 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13871 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13872 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13874 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13875 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13876 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13877 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13880 || (real_isfinite (re0) && real_isfinite (im0)
13881 && real_isfinite (re1) && real_isfinite (im1)))
13883 const struct real_format *const fmt =
13884 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13885 const int prec = fmt->p;
13886 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13887 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13891 mpc_init2 (m0, prec);
13892 mpc_init2 (m1, prec);
13893 mpfr_from_real (mpc_realref(m0), re0, rnd);
13894 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13895 mpfr_from_real (mpc_realref(m1), re1, rnd);
13896 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13897 mpfr_clear_flags ();
13898 inexact = func (m0, m0, m1, crnd);
13899 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
#endif /* HAVE_mpc */

/* FIXME tuples.
   The functions below provide an alternate interface for folding
   builtin function calls presented as GIMPLE_CALL statements rather
   than as CALL_EXPRs.  The folded result is still expressed as a
   tree.  There is too much code duplication in the handling of
   varargs functions, and a more intrusive re-factoring would permit
   better sharing of code between the tree and statement-based
   versions of these functions.  */
13919 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13920 along with N new arguments specified as the "..." parameters. SKIP
13921 is the number of arguments in STMT to be omitted. This function is used
13922 to do varargs-to-varargs transformations. */
13925 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13927 int oldnargs = gimple_call_num_args (stmt);
13928 int nargs = oldnargs - skip + n;
13929 tree fntype = TREE_TYPE (fndecl);
13930 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13934 location_t loc = gimple_location (stmt);
13936 buffer = XALLOCAVEC (tree, nargs);
13938 for (i = 0; i < n; i++)
13939 buffer[i] = va_arg (ap, tree);
13941 for (j = skip; j < oldnargs; j++, i++)
13942 buffer[i] = gimple_call_arg (stmt, j);
13944 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13947 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13948 a normal call should be emitted rather than expanding the function
13949 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13952 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13954 tree dest, size, len, fn, fmt, flag;
13955 const char *fmt_str;
13956 int nargs = gimple_call_num_args (stmt);
13958 /* Verify the required arguments in the original call. */
13961 dest = gimple_call_arg (stmt, 0);
13962 if (!validate_arg (dest, POINTER_TYPE))
13964 flag = gimple_call_arg (stmt, 1);
13965 if (!validate_arg (flag, INTEGER_TYPE))
13967 size = gimple_call_arg (stmt, 2);
13968 if (!validate_arg (size, INTEGER_TYPE))
13970 fmt = gimple_call_arg (stmt, 3);
13971 if (!validate_arg (fmt, POINTER_TYPE))
13974 if (! host_integerp (size, 1))
13979 if (!init_target_chars ())
13982 /* Check whether the format is a literal string constant. */
13983 fmt_str = c_getstr (fmt);
13984 if (fmt_str != NULL)
13986 /* If the format doesn't contain % args or %%, we know the size. */
13987 if (strchr (fmt_str, target_percent) == 0)
13989 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13990 len = build_int_cstu (size_type_node, strlen (fmt_str));
13992 /* If the format is "%s" and first ... argument is a string literal,
13993 we know the size too. */
13994 else if (fcode == BUILT_IN_SPRINTF_CHK
13995 && strcmp (fmt_str, target_percent_s) == 0)
14001 arg = gimple_call_arg (stmt, 4);
14002 if (validate_arg (arg, POINTER_TYPE))
14004 len = c_strlen (arg, 1);
14005 if (! len || ! host_integerp (len, 1))
14012 if (! integer_all_onesp (size))
14014 if (! len || ! tree_int_cst_lt (len, size))
14018 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
14019 or if format doesn't contain % chars or is "%s". */
14020 if (! integer_zerop (flag))
14022 if (fmt_str == NULL)
14024 if (strchr (fmt_str, target_percent) != NULL
14025 && strcmp (fmt_str, target_percent_s))
14029 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
14030 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
14031 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
14035 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
14038 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14039 a normal call should be emitted rather than expanding the function
14040 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14041 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14042 passed as second argument. */
14045 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14046 enum built_in_function fcode)
14048 tree dest, size, len, fn, fmt, flag;
14049 const char *fmt_str;
14051 /* Verify the required arguments in the original call. */
14052 if (gimple_call_num_args (stmt) < 5)
14054 dest = gimple_call_arg (stmt, 0);
14055 if (!validate_arg (dest, POINTER_TYPE))
14057 len = gimple_call_arg (stmt, 1);
14058 if (!validate_arg (len, INTEGER_TYPE))
14060 flag = gimple_call_arg (stmt, 2);
14061 if (!validate_arg (flag, INTEGER_TYPE))
14063 size = gimple_call_arg (stmt, 3);
14064 if (!validate_arg (size, INTEGER_TYPE))
14066 fmt = gimple_call_arg (stmt, 4);
14067 if (!validate_arg (fmt, POINTER_TYPE))
14070 if (! host_integerp (size, 1))
14073 if (! integer_all_onesp (size))
14075 if (! host_integerp (len, 1))
14077 /* If LEN is not constant, try MAXLEN too.
14078 For MAXLEN only allow optimizing into non-_ocs function
14079 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
14080 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
14086 if (tree_int_cst_lt (size, maxlen))
14090 if (!init_target_chars ())
14093 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
14094 or if format doesn't contain % chars or is "%s". */
14095 if (! integer_zerop (flag))
14097 fmt_str = c_getstr (fmt);
14098 if (fmt_str == NULL)
14100 if (strchr (fmt_str, target_percent) != NULL
14101 && strcmp (fmt_str, target_percent_s))
14105 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
14107 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
14108 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
14112 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
14115 /* Builtins with folding operations that operate on "..." arguments
14116 need special handling; we need to store the arguments in a convenient
14117 data structure before attempting any folding. Fortunately there are
14118 only a few builtins that fall into this category. FNDECL is the
14119 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14120 result of the function call is ignored. */
14123 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14124 bool ignore ATTRIBUTE_UNUSED)
14126 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14127 tree ret = NULL_TREE;
14131 case BUILT_IN_SPRINTF_CHK:
14132 case BUILT_IN_VSPRINTF_CHK:
14133 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14136 case BUILT_IN_SNPRINTF_CHK:
14137 case BUILT_IN_VSNPRINTF_CHK:
14138 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14145 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14146 TREE_NO_WARNING (ret) = 1;
14152 /* A wrapper function for builtin folding that prevents warnings for
14153 "statement without effect" and the like, caused by removing the
14154 call node earlier than the warning is generated. */
14157 fold_call_stmt (gimple stmt, bool ignore)
14159 tree ret = NULL_TREE;
14160 tree fndecl = gimple_call_fndecl (stmt);
14161 location_t loc = gimple_location (stmt);
14163 && TREE_CODE (fndecl) == FUNCTION_DECL
14164 && DECL_BUILT_IN (fndecl)
14165 && !gimple_call_va_arg_pack_p (stmt))
14167 int nargs = gimple_call_num_args (stmt);
14169 if (avoid_folding_inline_builtin (fndecl))
14171 /* FIXME: Don't use a list in this interface. */
14172 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14174 tree arglist = NULL_TREE;
14176 for (i = nargs - 1; i >= 0; i--)
14177 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
14178 return targetm.fold_builtin (fndecl, arglist, ignore);
14182 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14184 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
14186 for (i = 0; i < nargs; i++)
14187 args[i] = gimple_call_arg (stmt, i);
14188 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14191 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14194 /* Propagate location information from original call to
14195 expansion of builtin. Otherwise things like
14196 maybe_emit_chk_warning, that operate on the expansion
14197 of a builtin, will use the wrong location information. */
14198 if (gimple_has_location (stmt))
14200 tree realret = ret;
14201 if (TREE_CODE (ret) == NOP_EXPR)
14202 realret = TREE_OPERAND (ret, 0);
14203 if (CAN_HAVE_LOCATION_P (realret)
14204 && !EXPR_HAS_LOCATION (realret))
14205 SET_EXPR_LOCATION (realret, loc);