/* Internal functions.
   Copyright (C) 2011-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "hashtab.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}
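
/* For illustration (a sketch, not generated code): an entry in
   internal-fn.def such as

     DEF_INTERNAL_FN (LOAD_LANES, ECF_CONST | ECF_LEAF, NULL)

   contributes "LOAD_LANES" to internal_fn_name_array and
   ECF_CONST | ECF_LEAF to internal_fn_flags_array via the expansions
   above, and, because its fnspec is NULL, leaves
   internal_fn_fnspec_array[IFN_LOAD_LANES] untouched in
   init_internal_fns.  See internal-fn.def for the authoritative
   flags of each function.  */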

/* ARRAY_TYPE is an array type whose elements are vectors.  Return the
   associated insn for load-lanes-style optab OPTAB.  The insn must
   exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  machine_mode imode;
  machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}
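
/* As an illustration of the lookup above: for a (hypothetical) target
   whose vec_load_lanes pattern transfers two V4SI vectors at once,
   ARRAY_TYPE would be a two-element array of V4SI vectors, IMODE the
   mode of that whole array and VMODE V4SImode, and the convert_optab
   is indexed by the (IMODE, VMODE) pair.  The exact modes involved
   are target-specific.  */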

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gcall *stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gcall *stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}
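
/* A sketch of the GIMPLE these two expanders consume (the dump syntax
   varies between releases): the vectorizer emits internal calls of
   the form

     vect_array = LOAD_LANES (MEM_REF);
     MEM_REF = STORE_LANES (vect_array);

   where the single memory operand covers all vectors of the array,
   so the insn found by get_multi_vector_move performs the whole
   interleaved transfer at once.  */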

/* This should get removed when loop annotations are processed;
   reaching expansion indicates a bug.  */

static void
expand_ANNOTATE (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_VPTR (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_OBJECT_SIZE (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the tsan pass.  */

static void
expand_TSAN_FUNC_EXIT (gcall *)
{
  gcc_unreachable ();
}

/* Helper function for expand_addsub_overflow.  Return 1 if ARG,
   interpreted as signed in its precision, is known to be always
   non-negative, 2 if ARG is known to be always negative, or 3 if its
   sign is unknown.  */

static int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      wide_int w = wi::sext (arg, prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* A narrower value zero extended into a wider type
	 always results in a non-negative value.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 2;
    }
  return 3;
}
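
/* Worked examples (assuming usable range info is recorded): for an
   int SSA name with a VR_RANGE of [0, 255] this returns 1; for
   [-16, -1] it returns 2; for [-1, 1], or when no range is known,
   it returns 3.  Note that 1 really means "non-negative": the
   constant 0 also yields 1.  */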

/* Return the minimum precision needed to represent all values
   of ARG in SIGNed integral type.  */

static int
get_min_precision (tree arg, signop sign)
{
  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  signop orig_sign = sign;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      int p;
      if (TYPE_SIGN (TREE_TYPE (arg)) != sign)
	{
	  widest_int w = wi::to_widest (arg);
	  w = wi::ext (w, prec, sign);
	  p = wi::min_precision (w, sign);
	}
      else
	p = wi::min_precision (arg, sign);
      return MIN (p, prec);
    }
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
	    sign = UNSIGNED;
	  else if (sign == UNSIGNED && get_range_pos_neg (arg) != 1)
	    return prec + (orig_sign != sign);
	  prec = TYPE_PRECISION (TREE_TYPE (arg));
	}
      if (++cnt > 30)
	return prec + (orig_sign != sign);
    }
  if (TREE_CODE (arg) != SSA_NAME)
    return prec + (orig_sign != sign);
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      arg = t;
	      if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
		{
		  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
		    sign = UNSIGNED;
		  else if (sign == UNSIGNED && get_range_pos_neg (arg) != 1)
		    return prec + (orig_sign != sign);
		  prec = TYPE_PRECISION (TREE_TYPE (arg));
		}
	      if (++cnt > 30)
		return prec + (orig_sign != sign);
	      continue;
	    }
	}
      return prec + (orig_sign != sign);
    }
  if (sign == TYPE_SIGN (TREE_TYPE (arg)))
    {
      int p1 = wi::min_precision (arg_min, sign);
      int p2 = wi::min_precision (arg_max, sign);
      p1 = MAX (p1, p2);
      prec = MIN (prec, p1);
    }
  else if (sign == UNSIGNED && !wi::neg_p (arg_min, SIGNED))
    {
      int p = wi::min_precision (arg_max, SIGNED);
      prec = MIN (prec, p);
    }
  return prec + (orig_sign != sign);
}
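
/* Worked example (assuming usable range info): for an unsigned int
   SSA name with a recorded range of [0, 100] and SIGN == UNSIGNED,
   wi::min_precision (100, UNSIGNED) is 7, so 7 is returned; for a
   signed int SSA name with the same range and SIGN == SIGNED, a sign
   bit is needed on top of the 7 value bits and 8 is returned.  If
   the walk through conversions flips the requested sign, the final
   "+ (orig_sign != sign)" pays for the extra bit.  */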

/* Helper for expand_*_overflow.  Store RES into the __real__ part
   of TARGET.  If RES has a larger MODE than the __real__ part of
   TARGET, set the __imag__ part to 1 if RES doesn't fit into it.  */

static void
expand_arith_overflow_result_store (tree lhs, rtx target,
				    machine_mode mode, rtx res)
{
  machine_mode tgtmode = GET_MODE_INNER (GET_MODE (target));
  rtx lres = res;
  if (tgtmode != mode)
    {
      rtx_code_label *done_label = gen_label_rtx ();
      int uns = TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs)));
      lres = convert_modes (tgtmode, mode, res, uns);
      gcc_assert (GET_MODE_PRECISION (tgtmode) < GET_MODE_PRECISION (mode));
      do_compare_rtx_and_jump (res, convert_modes (mode, tgtmode, lres, uns),
			       EQ, true, mode, NULL_RTX, NULL_RTX, done_label,
			       PROB_VERY_LIKELY);
      write_complex_part (target, const1_rtx, true);
      emit_label (done_label);
    }
  write_complex_part (target, lres, false);
}
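
/* For instance, for a source-level call like
   __builtin_add_overflow (long_a, long_b, &int_res) the arithmetic
   is done in the mode of long while the __real__ part of TARGET has
   the mode of int; the comparison above then catches the additional
   truncation overflow and records it in the __imag__ part.  (A
   sketch; the exact setup is decided by expand_arith_overflow.)  */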

/* Helper for expand_*_overflow.  Store RES into TARGET.  */

static void
expand_ubsan_result_store (rtx target, rtx res)
{
  if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    convert_move (SUBREG_REG (target), res, SUBREG_PROMOTED_SIGN (target));
  else
    emit_move_insn (target, res);
}

/* Add addition/subtraction overflow checking to the statement STMT.
   CODE says whether the operation is PLUS_EXPR or MINUS_EXPR.  */

static void
expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
			tree arg0, tree arg1, bool unsr_p, bool uns0_p,
			bool uns1_p, bool is_ubsan)
{
  rtx res, target = NULL_RTX;
  tree fn;
  rtx_code_label *done_label = gen_label_rtx ();
  rtx_code_label *do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  int prec = GET_MODE_PRECISION (mode);
  rtx sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
  bool do_xor = false;

  if (is_ubsan)
    gcc_assert (!unsr_p && !uns0_p && !uns1_p);

  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  /* We assume both operands and result have the same precision
     here (GET_MODE_BITSIZE (mode)), S stands for signed type
     with that precision, U for unsigned type with that precision,
     sgn for unsigned most significant bit in that precision.
     s1 is signed first operand, u1 is unsigned first operand,
     s2 is signed second operand, u2 is unsigned second operand,
     sr is signed result, ur is unsigned result and the following
     rules say how to compute result (which is always result of
     the operands as if both were unsigned, cast to the right
     signedness) and how to compute whether operation overflowed.

     s1 + s2 -> sr
	res = (S) ((U) s1 + (U) s2)
	ovf = s2 < 0 ? res > s1 : res < s1 (or jump on overflow)
     s1 - s2 -> sr
	res = (S) ((U) s1 - (U) s2)
	ovf = s2 < 0 ? res < s1 : res > s1 (or jump on overflow)
     u1 + u2 -> ur
	res = u1 + u2
	ovf = res < u1 (or jump on carry, but RTL opts will handle it)
     u1 - u2 -> ur
	res = u1 - u2
	ovf = res > u1 (or jump on carry, but RTL opts will handle it)
     s1 + u2 -> sr
	res = (S) ((U) s1 + u2)
	ovf = ((U) res ^ sgn) < u2
     s1 + u2 -> ur
	t1 = (S) (u2 ^ sgn)
	t2 = s1 + t1
	res = (U) t2 ^ sgn
	ovf = t1 < 0 ? t2 > s1 : t2 < s1 (or jump on overflow)
     s1 - u2 -> sr
	res = (S) ((U) s1 - u2)
	ovf = u2 > ((U) s1 ^ sgn)
     s1 - u2 -> ur
	res = (U) s1 - u2
	ovf = s1 < 0 || u2 > (U) s1
     u1 - s2 -> sr
	res = u1 - (U) s2
	ovf = u1 >= ((U) s2 ^ sgn)
     u1 - s2 -> ur
	t1 = u1 ^ sgn
	t2 = t1 - (U) s2
	res = t2 ^ sgn
	ovf = s2 < 0 ? (S) t2 < (S) t1 : (S) t2 > (S) t1 (or jump on overflow)
     s1 + s2 -> ur
	res = (U) s1 + (U) s2
	ovf = s2 < 0 ? ((s1 | (S) res) < 0) : ((s1 & (S) res) < 0)
     u1 + u2 -> sr
	res = (S) (u1 + u2)
	ovf = (U) res < u2 || res < 0
     u1 - u2 -> sr
	res = (S) (u1 - u2)
	ovf = u1 >= u2 ? res < 0 : res >= 0
     s1 - s2 -> ur
	res = (U) s1 - (U) s2
	ovf = s2 >= 0 ? ((s1 | (S) res) < 0) : ((s1 & (S) res) < 0)  */
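
  /* A worked 8-bit example of the first rule (s1 + s2 -> sr):
     s1 = 100, s2 = 50 gives res = (S) ((U) 100 + (U) 50) = (S) 150
     = -106.  s2 >= 0, so ovf = res < s1 = (-106 < 100) = true: the
     signed overflow (150 > 127) is correctly flagged even though the
     unsigned addition itself was well defined.  With s2 = -50
     instead, res = 50 and ovf = res > s1 = false.  */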

  if (code == PLUS_EXPR && uns0_p && !uns1_p)
    {
      /* PLUS_EXPR is commutative, if operand signedness differs,
	 canonicalize to the first operand being signed and second
	 unsigned to simplify following code.  */
      rtx tem = op1;
      op1 = op0;
      op0 = tem;
      tree t = arg1;
      arg1 = arg0;
      arg0 = t;
      uns0_p = 0;
      uns1_p = 1;
    }

  /* u1 +- u2 -> ur  */
  if (uns0_p && uns1_p && unsr_p)
    {
      /* Compute the operation.  On the RTL level, the operation is
	 always unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      rtx tem = op0;
      /* For PLUS_EXPR, the operation is commutative, so we can pick
	 the operand to compare against.  For prec <= BITS_PER_WORD,
	 preferring a REG operand over a CONST_INT seems better, because
	 the CONST_INT might enlarge the instruction or CSE would need
	 to figure out we'd already loaded it into a register before.
	 For prec > BITS_PER_WORD, a CONST_INT might be more beneficial,
	 as then the multi-word comparison can perhaps be simplified.  */
      if (code == PLUS_EXPR
	  && (prec <= BITS_PER_WORD
	      ? (CONST_SCALAR_INT_P (op0) && REG_P (op1))
	      : CONST_SCALAR_INT_P (op1)))
	tem = op1;
      do_compare_rtx_and_jump (res, tem, code == PLUS_EXPR ? GEU : LEU,
			       true, mode, NULL_RTX, NULL_RTX, done_label,
			       PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* s1 +- u2 -> sr  */
  if (!uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On the RTL level, the operation is
	 always unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      rtx tem = expand_binop (mode, add_optab,
			      code == PLUS_EXPR ? res : op0, sgn,
			      NULL_RTX, false, OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (tem, op1, GEU, true, mode, NULL_RTX, NULL_RTX,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* s1 + u2 -> ur  */
  if (code == PLUS_EXPR && !uns0_p && uns1_p && unsr_p)
    {
      op1 = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      /* As we've changed op1, we have to avoid using the value range
	 for the original argument.  */
      arg1 = error_mark_node;
      do_xor = true;
      goto do_signed;
    }

  /* u1 - s2 -> ur  */
  if (code == MINUS_EXPR && uns0_p && !uns1_p && unsr_p)
    {
      op0 = expand_binop (mode, add_optab, op0, sgn, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      /* As we've changed op0, we have to avoid using the value range
	 for the original argument.  */
      arg0 = error_mark_node;
      do_xor = true;
      goto do_signed;
    }
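
  /* A note on the do_xor trick used by the two cases above: in
     modular arithmetic, adding sgn (only the most significant bit
     set) is the same as XORing it in, because any carry out of the
     top bit is discarded.  Flipping the sign bit biases the unsigned
     operand into signed range, so the signed do_signed check below
     applies; the result is un-biased by the same addition under
     "if (do_xor)" at the end of the function.  */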

  /* s1 - u2 -> ur  */
  if (code == MINUS_EXPR && !uns0_p && uns1_p && unsr_p)
    {
      /* Compute the operation.  On the RTL level, the subtraction is
	 always unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      int pos_neg = get_range_pos_neg (arg0);
      if (pos_neg == 2)
	/* If ARG0 is known to be always negative, this is always overflow.  */
	emit_jump (do_error);
      else if (pos_neg == 3)
	/* If ARG0 is not known to be always non-negative, check at
	   runtime.  */
	do_compare_rtx_and_jump (op0, const0_rtx, LT, false, mode, NULL_RTX,
				 NULL_RTX, do_error, PROB_VERY_UNLIKELY);
      do_compare_rtx_and_jump (op1, op0, LEU, true, mode, NULL_RTX, NULL_RTX,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* u1 - s2 -> sr  */
  if (code == MINUS_EXPR && uns0_p && !uns1_p && !unsr_p)
    {
      /* Compute the operation.  On the RTL level, the subtraction is
	 always unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      rtx tem = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (op0, tem, LTU, true, mode, NULL_RTX, NULL_RTX,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* u1 + u2 -> sr  */
  if (code == PLUS_EXPR && uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On the RTL level, the addition is
	 always unsigned.  */
      res = expand_binop (mode, add_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
			       NULL_RTX, do_error, PROB_VERY_UNLIKELY);
      rtx tem = op1;
      /* The operation is commutative, so we can pick the operand to
	 compare against.  For prec <= BITS_PER_WORD, preferring a REG
	 operand over a CONST_INT seems better, because the CONST_INT
	 might enlarge the instruction or CSE would need to figure out
	 we'd already loaded it into a register before.  For
	 prec > BITS_PER_WORD, a CONST_INT might be more beneficial, as
	 then the multi-word comparison can perhaps be simplified.  */
      if (prec <= BITS_PER_WORD
	  ? (CONST_SCALAR_INT_P (op1) && REG_P (op0))
	  : CONST_SCALAR_INT_P (op0))
	tem = op0;
      do_compare_rtx_and_jump (res, tem, GEU, true, mode, NULL_RTX, NULL_RTX,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* s1 +- s2 -> ur  */
  if (!uns0_p && !uns1_p && unsr_p)
    {
      /* Compute the operation.  On the RTL level, the operation is
	 always unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      int pos_neg = get_range_pos_neg (arg1);
      if (code == PLUS_EXPR)
	{
	  int pos_neg0 = get_range_pos_neg (arg0);
	  if (pos_neg0 != 3 && pos_neg == 3)
	    {
	      rtx tem = op1;
	      op1 = op0;
	      op0 = tem;
	      pos_neg = pos_neg0;
	    }
	}
      rtx tem;
      if (pos_neg != 3)
	{
	  tem = expand_binop (mode, ((pos_neg == 1) ^ (code == MINUS_EXPR))
				    ? and_optab : ior_optab,
			      op0, res, NULL_RTX, false, OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	}
      else
	{
	  rtx_code_label *do_ior_label = gen_label_rtx ();
	  do_compare_rtx_and_jump (op1, const0_rtx,
				   code == MINUS_EXPR ? GE : LT, false, mode,
				   NULL_RTX, NULL_RTX, do_ior_label,
				   PROB_EVEN);
	  tem = expand_binop (mode, and_optab, op0, res, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	  emit_jump (do_error);
	  emit_label (do_ior_label);
	  tem = expand_binop (mode, ior_optab, op0, res, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	}
      goto do_error_label;
    }

  /* u1 - u2 -> sr  */
  if (code == MINUS_EXPR && uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On the RTL level, the subtraction is
	 always unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      rtx_code_label *op0_geu_op1 = gen_label_rtx ();
      do_compare_rtx_and_jump (op0, op1, GEU, true, mode, NULL_RTX, NULL_RTX,
			       op0_geu_op1, PROB_EVEN);
      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
			       NULL_RTX, done_label, PROB_VERY_LIKELY);
      emit_jump (do_error);
      emit_label (op0_geu_op1);
      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
			       NULL_RTX, done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }
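
  /* A worked 8-bit example of the rule above: u1 = 250, u2 = 10 gives
     res = 240 = (S) -16; u1 >= u2, so ovf = res < 0 = true (240 does
     not fit in a signed char).  Conversely u1 = 10, u2 = 20 gives
     res = 246 = (S) -10; u1 < u2, so ovf = res >= 0 = false, and -10
     is indeed the correct signed result.  */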

  gcc_assert (!uns0_p && !uns1_p && !unsr_p);

  /* s1 +- s2 -> sr  */
 do_signed: ;
  enum insn_code icode;
  icode = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx_code_label *sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On the RTL level, the operation is
	 always unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove that one of the arguments (for MINUS_EXPR only
	 the second operand, as subtraction is not commutative) is always
	 non-negative or always negative, we can do just one comparison
	 and conditional jump at runtime instead of 2 (out of the 3
	 present in the emitted code).  If one of the arguments is
	 CONST_INT, all we need is to make sure it is op1, then the first
	 do_compare_rtx_and_jump will be just folded.  Otherwise try
	 to use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}
      else if (CONST_INT_P (op1))
	;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
	{
	  pos_neg = get_range_pos_neg (arg0);
	  if (pos_neg != 3)
	    {
	      rtx tem = op0;
	      op0 = op1;
	      op1 = tem;
	    }
	}
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
	pos_neg = get_range_pos_neg (arg1);

      /* If op1 may be negative, we have to use a different check.  */
      if (pos_neg == 3)
	do_compare_rtx_and_jump (op1, const0_rtx, LT, false, mode, NULL_RTX,
				 NULL_RTX, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
	do_compare_rtx_and_jump (res, op0, code == PLUS_EXPR ? GE : LE,
				 false, mode, NULL_RTX, NULL_RTX, done_label,
				 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
	{
	  emit_jump (do_error);

	  emit_label (sub_check);
	}

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
	do_compare_rtx_and_jump (res, op0, code == PLUS_EXPR ? LE : GE,
				 false, mode, NULL_RTX, NULL_RTX, done_label,
				 PROB_VERY_LIKELY);
    }

 do_error_label:
  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (code, loc, TREE_TYPE (arg0),
					 arg0, arg1);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    write_complex_part (target, const1_rtx, true);

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	{
	  if (do_xor)
	    res = expand_binop (mode, add_optab, res, sgn, NULL_RTX, false,
				OPTAB_LIB_WIDEN);

	  expand_arith_overflow_result_store (lhs, target, mode, res);
	}
    }
}
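
/* A user-level sketch of what expand_addsub_overflow implements: the
   __builtin_{add,sub}_overflow builtins lower to internal calls whose
   value is a complex pair of (result, overflow flag), e.g.

     int r;
     if (__builtin_add_overflow (a, b, &r))   // the __imag__ part
       handle_overflow ();                    // r is the __real__ part

   where handle_overflow is a placeholder, not a GCC entity.  The
   ubsan variants (-fsanitize=signed-integer-overflow) take the
   is_ubsan path instead and call the runtime diagnostic on the
   do_error edge.  */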

/* Add negate overflow checking to the statement STMT.  */

static void
expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan)
{
  rtx res, op1;
  tree fn;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On the RTL level, the negation wraps
	 like unsigned arithmetic.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      do_compare_rtx_and_jump (op1, minv, NE, true, mode, NULL_RTX, NULL_RTX,
			       done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (NEGATE_EXPR, loc, TREE_TYPE (arg1),
					 arg1, NULL_TREE);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    write_complex_part (target, const1_rtx, true);

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	expand_arith_overflow_result_store (lhs, target, mode, res);
    }
}
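
/* In two's complement only one input overflows negation: the most
   negative value (e.g. INT_MIN for int), whose negation is not
   representable.  That is why the fallback above needs only a single
   comparison of OP1 against TYPE_MIN_VALUE instead of inspecting the
   result.  */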

/* Add mul overflow checking to the statement STMT.  */

static void
expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
		     bool unsr_p, bool uns0_p, bool uns1_p, bool is_ubsan)
{
  rtx res, op0, op1;
  tree fn, type;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;
  signop sign;
  enum insn_code icode;

  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  bool uns = unsr_p;
  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  if (is_ubsan)
    gcc_assert (!unsr_p && !uns0_p && !uns1_p);

  /* We assume both operands and result have the same precision
     here (GET_MODE_BITSIZE (mode)), S stands for signed type
     with that precision, U for unsigned type with that precision,
     sgn for unsigned most significant bit in that precision.
     s1 is signed first operand, u1 is unsigned first operand,
     s2 is signed second operand, u2 is unsigned second operand,
     sr is signed result, ur is unsigned result and the following
     rules say how to compute result (which is always result of
     the operands as if both were unsigned, cast to the right
     signedness) and how to compute whether operation overflowed.
     main_ovf (false) stands for jump on signed multiplication
     overflow or the main algorithm with uns == false.
     main_ovf (true) stands for jump on unsigned multiplication
     overflow or the main algorithm with uns == true.

     s1 * s2 -> sr
	res = (S) ((U) s1 * (U) s2)
	ovf = main_ovf (false)
     u1 * u2 -> ur
	res = u1 * u2
	ovf = main_ovf (true)
     s1 * u2 -> ur
	res = (U) s1 * u2
	ovf = (s1 < 0 && u2) || main_ovf (true)
     u1 * u2 -> sr
	res = (S) (u1 * u2)
	ovf = res < 0 || main_ovf (true)
     s1 * u2 -> sr
	res = (S) ((U) s1 * u2)
	ovf = (S) u2 >= 0 ? main_ovf (false)
			  : (s1 != 0 && (s1 != -1 || u2 != (U) res))
     s1 * s2 -> ur
	t1 = (s1 & s2) < 0 ? (-(U) s1) : ((U) s1)
	t2 = (s1 & s2) < 0 ? (-(U) s2) : ((U) s2)
	res = t1 * t2
	ovf = (s1 ^ s2) < 0 ? (s1 && s2) : main_ovf (true)  */
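
  /* A worked 8-bit example of the u1 * u2 -> sr rule: u1 = 20,
     u2 = 10 gives res = (S) 200 = -56; main_ovf (true) does not fire
     (200 fits in 8 unsigned bits), but res < 0 does, so the signed
     overflow (200 > 127) is still reported.  */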

  if (uns0_p && !uns1_p)
    {
      /* Multiplication is commutative, if operand signedness differs,
	 canonicalize to the first operand being signed and second
	 unsigned to simplify following code.  */
      rtx tem = op1;
      op1 = op0;
      op0 = tem;
      tree t = arg1;
      arg1 = arg0;
      arg0 = t;
      uns0_p = 0;
      uns1_p = 1;
    }

  int pos_neg0 = get_range_pos_neg (arg0);
  int pos_neg1 = get_range_pos_neg (arg1);

  /* s1 * u2 -> ur  */
  if (!uns0_p && uns1_p && unsr_p)
    {
      switch (pos_neg0)
	{
	case 1:
	  /* If s1 is non-negative, just perform normal u1 * u2 -> ur.  */
	  goto do_main;
	case 2:
	  /* If s1 is negative, avoid the main code, just multiply and
	     signal overflow if op1 is not 0.  */
	  struct separate_ops ops;
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg1);
	  ops.op0 = make_tree (ops.type, op0);
	  ops.op1 = make_tree (ops.type, op1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	  goto do_error_label;
	case 3:
	  rtx_code_label *do_main_label;
	  do_main_label = gen_label_rtx ();
	  do_compare_rtx_and_jump (op0, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, do_main_label, PROB_VERY_LIKELY);
	  do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL_RTX, do_main_label, PROB_VERY_LIKELY);
	  write_complex_part (target, const1_rtx, true);
	  emit_label (do_main_label);
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

  /* u1 * u2 -> sr  */
  if (uns0_p && uns1_p && !unsr_p)
    {
      uns = true;
      /* Rest of handling of this case after res is computed.  */
      goto do_main;
    }

  /* s1 * u2 -> sr  */
  if (!uns0_p && uns1_p && !unsr_p)
    {
      switch (pos_neg1)
	{
	case 1:
	  goto do_main;
	case 2:
	  /* If (S) u2 is negative (i.e. u2 is larger than the maximum
	     of S), avoid the main code, just multiply and signal
	     overflow unless 0 * u2 or -1 * ((U) Smin).  */
	  struct separate_ops ops;
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg1);
	  ops.op0 = make_tree (ops.type, op0);
	  ops.op1 = make_tree (ops.type, op1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	  do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
				   NULL_RTX, do_error, PROB_VERY_UNLIKELY);
	  int prec;
	  prec = GET_MODE_PRECISION (mode);
	  rtx sgn;
	  sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
	  do_compare_rtx_and_jump (op1, sgn, EQ, true, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	  goto do_error_label;
	case 3:
	  /* Rest of handling of this case after res is computed.  */
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

  /* s1 * s2 -> ur  */
  if (!uns0_p && !uns1_p && unsr_p)
    {
      rtx tem, tem2;
      switch (pos_neg0 | pos_neg1)
	{
	case 1: /* Both operands known to be non-negative.  */
	  goto do_main;
	case 2: /* Both operands known to be negative.  */
	  op0 = expand_unop (mode, neg_optab, op0, NULL_RTX, false);
	  op1 = expand_unop (mode, neg_optab, op1, NULL_RTX, false);
	  /* Avoid looking at arg0/arg1 ranges, as we've changed
	     the arguments.  */
	  arg0 = error_mark_node;
	  arg1 = error_mark_node;
	  goto do_main;
	case 3:
	  if ((pos_neg0 ^ pos_neg1) == 3)
	    {
	      /* If one operand is known to be negative and the other
		 non-negative, this always overflows, unless the
		 non-negative one is 0.  Just do normal multiply and set
		 overflow unless one of the operands is 0.  */
	      struct separate_ops ops;
	      ops.code = MULT_EXPR;
	      ops.type
		= build_nonstandard_integer_type (GET_MODE_PRECISION (mode),
						  1);
	      ops.op0 = make_tree (ops.type, op0);
	      ops.op1 = make_tree (ops.type, op1);
	      ops.op2 = NULL_TREE;
	      ops.location = loc;
	      res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	      tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
				  OPTAB_LIB_WIDEN);
	      do_compare_rtx_and_jump (tem, const0_rtx, EQ, true, mode,
				       NULL_RTX, NULL_RTX, done_label,
				       PROB_VERY_LIKELY);
	      goto do_error_label;
	    }
	  /* The general case, do all the needed comparisons at runtime.  */
	  rtx_code_label *do_main_label, *after_negate_label;
	  rtx rop0, rop1;
	  rop0 = gen_reg_rtx (mode);
	  rop1 = gen_reg_rtx (mode);
	  emit_move_insn (rop0, op0);
	  emit_move_insn (rop1, op1);
	  op0 = rop0;
	  op1 = rop1;
	  do_main_label = gen_label_rtx ();
	  after_negate_label = gen_label_rtx ();
	  tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, after_negate_label,
				   PROB_VERY_LIKELY);
	  /* Both arguments negative here, negate them and continue with
	     normal unsigned overflow checking multiplication.  */
	  emit_move_insn (op0, expand_unop (mode, neg_optab, op0,
					    NULL_RTX, false));
	  emit_move_insn (op1, expand_unop (mode, neg_optab, op1,
					    NULL_RTX, false));
	  /* Avoid looking at arg0/arg1 ranges, as we might have changed
	     the arguments.  */
	  arg0 = error_mark_node;
	  arg1 = error_mark_node;
	  emit_jump (do_main_label);
	  emit_label (after_negate_label);
	  tem2 = expand_binop (mode, xor_optab, op0, op1, NULL_RTX, false,
			       OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem2, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, do_main_label, PROB_VERY_LIKELY);
	  /* One argument is negative here, the other non-negative.  This
	     always overflows, unless one of the arguments is 0.  But
	     if e.g. s2 is 0, (U) s1 * 0 doesn't overflow, whatever s1
	     is, thus we can keep the do_main code ORing in the overflow
	     as is.  */
	  do_compare_rtx_and_jump (tem, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL_RTX, do_main_label, PROB_VERY_LIKELY);
	  write_complex_part (target, const1_rtx, true);
	  emit_label (do_main_label);
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

 do_main:
  type = build_nonstandard_integer_type (GET_MODE_PRECISION (mode), uns);
  sign = uns ? UNSIGNED : SIGNED;
  icode = optab_handler (uns ? umulv4_optab : mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      int prec = GET_MODE_PRECISION (mode);
      machine_mode hmode = mode_for_size (prec / 2, MODE_INT, 1);
      ops.op0 = make_tree (type, op0);
      ops.op1 = make_tree (type, op1);
      ops.op2 = NULL_TREE;
      ops.location = loc;
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
	  && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
	{
	  machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type
	    = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), uns);

	  res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
	  rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res, prec,
				     NULL_RTX, uns);
	  hipart = gen_lowpart (mode, hipart);
	  res = gen_lowpart (mode, res);
	  if (uns)
	    /* For the unsigned multiplication, there was overflow if
	       HIPART is non-zero.  */
	    do_compare_rtx_and_jump (hipart, const0_rtx, EQ, true, mode,
				     NULL_RTX, NULL_RTX, done_label,
				     PROB_VERY_LIKELY);
	  else
	    {
	      rtx signbit = expand_shift (RSHIFT_EXPR, mode, res, prec - 1,
					  NULL_RTX, 0);
	      /* RES is the low half of the double width result, HIPART
		 the high half.  There was overflow if
		 HIPART is different from RES < 0 ? -1 : 0.  */
	      do_compare_rtx_and_jump (signbit, hipart, EQ, true, mode,
				       NULL_RTX, NULL_RTX, done_label,
				       PROB_VERY_LIKELY);
	    }
	}
      else if (hmode != BLKmode && 2 * GET_MODE_PRECISION (hmode) == prec)
	{
	  rtx_code_label *large_op0 = gen_label_rtx ();
	  rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
	  rtx_code_label *one_small_one_large = gen_label_rtx ();
	  rtx_code_label *both_ops_large = gen_label_rtx ();
	  rtx_code_label *after_hipart_neg = uns ? NULL : gen_label_rtx ();
	  rtx_code_label *after_lopart_neg = uns ? NULL : gen_label_rtx ();
	  rtx_code_label *do_overflow = gen_label_rtx ();
	  rtx_code_label *hipart_different = uns ? NULL : gen_label_rtx ();

	  unsigned int hprec = GET_MODE_PRECISION (hmode);
	  rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
				      NULL_RTX, uns);
	  hipart0 = gen_lowpart (hmode, hipart0);
	  rtx lopart0 = gen_lowpart (hmode, op0);
	  rtx signbit0 = const0_rtx;
	  if (!uns)
	    signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
				     NULL_RTX, 0);
	  rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
				      NULL_RTX, uns);
	  hipart1 = gen_lowpart (hmode, hipart1);
	  rtx lopart1 = gen_lowpart (hmode, op1);
	  rtx signbit1 = const0_rtx;
	  if (!uns)
	    signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
				     NULL_RTX, 0);

	  res = gen_reg_rtx (mode);

	  /* True if op0 resp. op1 are known to be in the range of
	     halfstype.  */
	  bool op0_small_p = false;
	  bool op1_small_p = false;
	  /* True if op0 resp. op1 are known to have all zeros or all ones
	     in the upper half of bits, but are not known to be
	     op{0,1}_small_p.  */
	  bool op0_medium_p = false;
	  bool op1_medium_p = false;
	  /* -1 if op{0,1} is known to be negative, 0 if it is known to be
	     nonnegative, 1 if unknown.  */
	  int op0_sign = 1;
	  int op1_sign = 1;

	  if (pos_neg0 == 1)
	    op0_sign = 0;
	  else if (pos_neg0 == 2)
	    op0_sign = -1;
	  if (pos_neg1 == 1)
	    op1_sign = 0;
	  else if (pos_neg1 == 2)
	    op1_sign = -1;

	  unsigned int mprec0 = prec;
	  if (arg0 != error_mark_node)
	    mprec0 = get_min_precision (arg0, sign);
	  if (mprec0 <= hprec)
	    op0_small_p = true;
	  else if (!uns && mprec0 <= hprec + 1)
	    op0_medium_p = true;
	  unsigned int mprec1 = prec;
	  if (arg1 != error_mark_node)
	    mprec1 = get_min_precision (arg1, sign);
	  if (mprec1 <= hprec)
	    op1_small_p = true;
	  else if (!uns && mprec1 <= hprec + 1)
	    op1_medium_p = true;

	  int smaller_sign = 1;
	  int larger_sign = 1;
	  if (op0_small_p)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op1_sign;
	    }
	  else if (op1_small_p)
	    {
	      smaller_sign = op1_sign;
	      larger_sign = op0_sign;
	    }
	  else if (op0_sign == op1_sign)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op0_sign;
	    }

	  if (!op0_small_p)
	    do_compare_rtx_and_jump (signbit0, hipart0, NE, true, hmode,
				     NULL_RTX, NULL_RTX, large_op0,
				     PROB_UNLIKELY);

	  if (!op1_small_p)
	    do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
				     NULL_RTX, NULL_RTX, small_op0_large_op1,
				     PROB_UNLIKELY);

	  /* If both op0 and op1 are sign (!uns) or zero (uns) extended from
	     hmode to mode, the multiplication will never overflow.  We can
	     do just one hmode x hmode => mode widening multiplication.  */
	  rtx lopart0s = lopart0, lopart1s = lopart1;
	  if (GET_CODE (lopart0) == SUBREG)
	    {
	      lopart0s = shallow_copy_rtx (lopart0);
	      SUBREG_PROMOTED_VAR_P (lopart0s) = 1;
	      SUBREG_PROMOTED_SET (lopart0s, uns ? SRP_UNSIGNED : SRP_SIGNED);
	    }
	  if (GET_CODE (lopart1) == SUBREG)
	    {
	      lopart1s = shallow_copy_rtx (lopart1);
	      SUBREG_PROMOTED_VAR_P (lopart1s) = 1;
	      SUBREG_PROMOTED_SET (lopart1s, uns ? SRP_UNSIGNED : SRP_SIGNED);
	    }
	  tree halfstype = build_nonstandard_integer_type (hprec, uns);
	  ops.op0 = make_tree (halfstype, lopart0s);
	  ops.op1 = make_tree (halfstype, lopart1s);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type = type;
	  rtx thisres
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, thisres);
	  emit_jump (done_label);

	  emit_label (small_op0_large_op1);

	  /* If op0 is sign (!uns) or zero (uns) extended from hmode to mode,
	     but op1 is not, just swap the arguments and handle it as op1
	     sign/zero extended, op0 not.  */
	  rtx larger = gen_reg_rtx (mode);
	  rtx hipart = gen_reg_rtx (hmode);
	  rtx lopart = gen_reg_rtx (hmode);
	  emit_move_insn (larger, op1);
	  emit_move_insn (hipart, hipart1);
	  emit_move_insn (lopart, lopart0);
	  emit_jump (one_small_one_large);

	  emit_label (large_op0);

	  if (!op1_small_p)
	    do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
				     NULL_RTX, NULL_RTX, both_ops_large,
				     PROB_UNLIKELY);

	  /* If op1 is sign (!uns) or zero (uns) extended from hmode to mode,
	     but op0 is not, prepare larger, hipart and lopart pseudos and
	     handle it together with small_op0_large_op1.  */
	  emit_move_insn (larger, op0);
	  emit_move_insn (hipart, hipart0);
	  emit_move_insn (lopart, lopart1);

	  emit_label (one_small_one_large);

	  /* lopart is the low part of the operand that is sign (!uns) or
	     zero (uns) extended to mode, larger is the other operand,
	     hipart is the high part of larger and lopart0 and lopart1
	     are the low parts of both operands.
	     We perform lopart0 * lopart1 and lopart * hipart widening
	     multiplications.  */
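
	  /* The algebra behind this (a sketch, with h = hprec and
	     exact arithmetic): larger = hipart * 2^h + lopart_of_larger
	     and the other operand equals lopart, so the product is

	       lopart * hipart * 2^h + lopart0 * lopart1

	     computed below as loxhi and lo0xlo1 from unsigned half
	     words; the sign corrections that follow account for
	     hipart and lopart being negative in the signed case.  */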
	  tree halfutype = build_nonstandard_integer_type (hprec, 1);
	  ops.op0 = make_tree (halfutype, lopart0);
	  ops.op1 = make_tree (halfutype, lopart1);
	  rtx lo0xlo1
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

	  ops.op0 = make_tree (halfutype, lopart);
	  ops.op1 = make_tree (halfutype, hipart);
	  rtx loxhi = gen_reg_rtx (mode);
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (loxhi, tem);

	  if (!uns)
	    {
	      /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
	      if (larger_sign == 0)
		emit_jump (after_hipart_neg);
	      else if (larger_sign != -1)
		do_compare_rtx_and_jump (hipart, const0_rtx, GE, false, hmode,
					 NULL_RTX, NULL_RTX, after_hipart_neg,
					 PROB_EVEN);

	      tem = convert_modes (mode, hmode, lopart, 1);
	      tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
	      tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
					 1, OPTAB_DIRECT);
	      emit_move_insn (loxhi, tem);

	      emit_label (after_hipart_neg);

	      /* if (lopart < 0) loxhi -= larger;  */
	      if (smaller_sign == 0)
		emit_jump (after_lopart_neg);
	      else if (smaller_sign != -1)
		do_compare_rtx_and_jump (lopart, const0_rtx, GE, false, hmode,
					 NULL_RTX, NULL_RTX, after_lopart_neg,
					 PROB_EVEN);

	      tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
					 1, OPTAB_DIRECT);
	      emit_move_insn (loxhi, tem);

	      emit_label (after_lopart_neg);
	    }

	  /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
	  tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  /* if (loxhi >> (bitsize / 2)
		 == (hmode) loxhi >> (bitsize / 2 - 1))	 (if !uns)
	     if (loxhi >> (bitsize / 2) == 0)		 (if uns).  */
	  rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
					  NULL_RTX, 0);
	  hipartloxhi = gen_lowpart (hmode, hipartloxhi);
	  rtx signbitloxhi = const0_rtx;
	  if (!uns)
	    signbitloxhi = expand_shift (RSHIFT_EXPR, hmode,
					 gen_lowpart (hmode, loxhi),
					 hprec - 1, NULL_RTX, 0);

	  do_compare_rtx_and_jump (signbitloxhi, hipartloxhi, NE, true, hmode,
				   NULL_RTX, NULL_RTX, do_overflow,
				   PROB_VERY_UNLIKELY);

	  /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
	  rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
					   NULL_RTX, 1);
	  tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

	  tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
				     1, OPTAB_DIRECT);
	  if (tem != res)
	    emit_move_insn (res, tem);
	  emit_jump (done_label);

	  emit_label (both_ops_large);

	  /* If both operands are large (not sign (!uns) or zero (uns)
	     extended from hmode), then perform the full multiplication
	     which will be the result of the operation.
	     For signed multiplication the only cases which don't overflow
	     are some where both hipart0 and hipart1 are 0 or -1.
	     Unsigned multiplication always overflows when both high parts
	     are non-zero.  */
	  ops.code = MULT_EXPR;
	  ops.op0 = make_tree (type, op0);
	  ops.op1 = make_tree (type, op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);

	  if (!uns)
	    {
	      if (!op0_medium_p)
		{
		  tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
					     NULL_RTX, 1, OPTAB_DIRECT);
		  do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
					   NULL_RTX, NULL_RTX, do_error,
					   PROB_VERY_UNLIKELY);
		}

	      if (!op1_medium_p)
		{
		  tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
					     NULL_RTX, 1, OPTAB_DIRECT);
		  do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
					   NULL_RTX, NULL_RTX, do_error,
					   PROB_VERY_UNLIKELY);
		}

	      /* At this point hipart{0,1} are both in [-1, 0].  If they are
		 the same, overflow happened if res is negative, if they are
		 different, overflow happened if res is positive.  */
	      if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
		emit_jump (hipart_different);
	      else if (op0_sign == 1 || op1_sign == 1)
		do_compare_rtx_and_jump (hipart0, hipart1, NE, true, hmode,
					 NULL_RTX, NULL_RTX, hipart_different,
					 PROB_EVEN);

	      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode,
				       NULL_RTX, NULL_RTX, do_error,
				       PROB_VERY_UNLIKELY);
	      emit_jump (done_label);

	      emit_label (hipart_different);

	      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode,
				       NULL_RTX, NULL_RTX, do_error,
				       PROB_VERY_UNLIKELY);
	      emit_jump (done_label);
	    }

	  emit_label (do_overflow);

	  /* Overflow, do the full multiplication and fall through into
	     do_error.  */
	  ops.op0 = make_tree (type, op0);
	  ops.op1 = make_tree (type, op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);
	}
      else
	{
	  gcc_assert (!is_ubsan);
	  ops.code = MULT_EXPR;
	  ops.type = type;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_jump (done_label);
	}
    }
1602
1603 do_error_label:
1604  emit_label (do_error);
1605  if (is_ubsan)
1606    {
1607      /* Expand the ubsan builtin call.  */
1608      push_temp_slots ();
1609      fn = ubsan_build_overflow_builtin (MULT_EXPR, loc, TREE_TYPE (arg0),
1610					 arg0, arg1);
1611      expand_normal (fn);
1612      pop_temp_slots ();
1613      do_pending_stack_adjust ();
1614    }
1615  else if (lhs)
1616    write_complex_part (target, const1_rtx, true);
1617
1618  /* We're done.  */
1619  emit_label (done_label);
1620
1621  /* u1 * u2 -> sr  */
1622  if (uns0_p && uns1_p && !unsr_p)
1623    {
1624      rtx_code_label *all_done_label = gen_label_rtx ();
1625      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
1626			       NULL_RTX, all_done_label, PROB_VERY_LIKELY);
1627      write_complex_part (target, const1_rtx, true);
1628      emit_label (all_done_label);
1629    }
1630
1631  /* s1 * u2 -> sr  */
  if (!uns0_p && uns1_p && !unsr_p && pos_neg1 == 3)
    {
      rtx_code_label *all_done_label = gen_label_rtx ();
      rtx_code_label *set_noovf = gen_label_rtx ();
      do_compare_rtx_and_jump (op1, const0_rtx, GE, false, mode, NULL_RTX,
			       NULL_RTX, all_done_label, PROB_VERY_LIKELY);
      write_complex_part (target, const1_rtx, true);
      do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
			       NULL_RTX, set_noovf, PROB_VERY_LIKELY);
      do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
			       NULL_RTX, all_done_label, PROB_VERY_UNLIKELY);
      do_compare_rtx_and_jump (op1, res, NE, true, mode, NULL_RTX, NULL_RTX,
			       all_done_label, PROB_VERY_UNLIKELY);
      emit_label (set_noovf);
      write_complex_part (target, const0_rtx, true);
      emit_label (all_done_label);
    }

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	expand_arith_overflow_result_store (lhs, target, mode, res);
    }
}

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  expand_addsub_overflow (loc, PLUS_EXPR, lhs, arg0, arg1,
			  false, false, false, true);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
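  /* 0 - X overflows only when X is the minimum signed value, so it
     can be checked with the cheaper negation-overflow expansion.  */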
  if (integer_zerop (arg0))
    expand_neg_overflow (loc, lhs, arg1, true);
  else
    expand_addsub_overflow (loc, MINUS_EXPR, lhs, arg0, arg1,
			    false, false, false, true);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  expand_mul_overflow (loc, lhs, arg0, arg1, false, false, false, true);
}

/* Helper function for {ADD,SUB,MUL}_OVERFLOW call stmt expansion.  */

static void
expand_arith_overflow (enum tree_code code, gimple stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  int uns0_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  int uns1_p = TYPE_UNSIGNED (TREE_TYPE (arg1));
  int unsr_p = TYPE_UNSIGNED (type);
  int prec0 = TYPE_PRECISION (TREE_TYPE (arg0));
  int prec1 = TYPE_PRECISION (TREE_TYPE (arg1));
  int precres = TYPE_PRECISION (type);
  location_t loc = gimple_location (stmt);
  if (!uns0_p && get_range_pos_neg (arg0) == 1)
    uns0_p = true;
  if (!uns1_p && get_range_pos_neg (arg1) == 1)
    uns1_p = true;
  int pr = get_min_precision (arg0, uns0_p ? UNSIGNED : SIGNED);
  prec0 = MIN (prec0, pr);
  pr = get_min_precision (arg1, uns1_p ? UNSIGNED : SIGNED);
  prec1 = MIN (prec1, pr);

  /* If uns0_p && uns1_p, precop is the minimum precision an unsigned
     type needs to hold the exact result; otherwise it is the minimum
     precision a signed type needs to hold the exact result.  */
  int precop;
  if (code == MULT_EXPR)
    precop = prec0 + prec1 + (uns0_p != uns1_p);
  else
    {
      if (uns0_p == uns1_p)
	precop = MAX (prec0, prec1) + 1;
      else if (uns0_p)
	precop = MAX (prec0 + 1, prec1) + 1;
      else
	precop = MAX (prec0, prec1 + 1) + 1;
    }
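  /* E.g. for a 16-bit signed and a 16-bit unsigned operand,
     precop is MAX (16, 16 + 1) + 1 == 18 for addition and
     subtraction, and 16 + 16 + 1 == 33 for multiplication.  */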
  int orig_precres = precres;

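  /* Iterate until one of the expansion strategies below applies at
     the current result precision, widening the operands and retrying
     otherwise.  E.g. u8 + u8 -> s32 has precop + !unsr_p == 10 <= 32,
     so the first strategy applies and no runtime check is needed.  */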
  do
    {
      if ((uns0_p && uns1_p)
	  ? ((precop + !unsr_p) <= precres
	     /* u1 - u2 -> ur can overflow, no matter what precision
		the result has.  */
	     && (code != MINUS_EXPR || !unsr_p))
	  : (!unsr_p && precop <= precres))
	{
	  /* The infinite precision result will always fit into the
	     result.  */
	  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  write_complex_part (target, const0_rtx, true);
	  machine_mode mode = TYPE_MODE (type);
	  struct separate_ops ops;
	  ops.code = code;
	  ops.type = type;
	  ops.op0 = fold_convert_loc (loc, type, arg0);
	  ops.op1 = fold_convert_loc (loc, type, arg1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  expand_arith_overflow_result_store (lhs, target, mode, tem);
	  return;
	}

#ifdef WORD_REGISTER_OPERATIONS
      /* For sub-word operations, if the target doesn't support them,
	 start with precres widening right away; otherwise widen only
	 when the simplest cases can't be used.  */
      if (orig_precres == precres && precres < BITS_PER_WORD)
	;
      else
#endif
      if ((uns0_p && uns1_p && unsr_p && prec0 <= precres && prec1 <= precres)
	  || ((!uns0_p || !uns1_p) && !unsr_p
	      && prec0 + uns0_p <= precres
	      && prec1 + uns1_p <= precres))
	{
	  arg0 = fold_convert_loc (loc, type, arg0);
	  arg1 = fold_convert_loc (loc, type, arg1);
	  switch (code)
	    {
	    case MINUS_EXPR:
	      if (integer_zerop (arg0) && !unsr_p)
		{
		  expand_neg_overflow (loc, lhs, arg1, false);
		  return;
		}
	      /* FALLTHRU */
	    case PLUS_EXPR:
	      expand_addsub_overflow (loc, code, lhs, arg0, arg1,
				      unsr_p, unsr_p, unsr_p, false);
	      return;
	    case MULT_EXPR:
	      expand_mul_overflow (loc, lhs, arg0, arg1,
				   unsr_p, unsr_p, unsr_p, false);
	      return;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* For sub-word operations, retry with a wider type first.  */
      if (orig_precres == precres && precop <= BITS_PER_WORD)
	{
#ifdef WORD_REGISTER_OPERATIONS
	  int p = BITS_PER_WORD;
#else
	  int p = precop;
#endif
	  machine_mode m = smallest_mode_for_size (p, MODE_INT);
	  tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
							uns0_p && uns1_p
							&& unsr_p);
	  p = TYPE_PRECISION (optype);
	  if (p > precres)
	    {
	      precres = p;
	      unsr_p = TYPE_UNSIGNED (optype);
	      type = optype;
	      continue;
	    }
	}
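      /* E.g. on a WORD_REGISTER_OPERATIONS target with 64-bit words,
	 an s16 + s16 -> s16 check is retried in the signed 64-bit
	 type, where the exact result always fits.  */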

      if (prec0 <= precres && prec1 <= precres)
	{
	  tree types[2];
	  if (unsr_p)
	    {
	      types[0] = build_nonstandard_integer_type (precres, 0);
	      types[1] = type;
	    }
	  else
	    {
	      types[0] = type;
	      types[1] = build_nonstandard_integer_type (precres, 1);
	    }
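	  /* types[0] is the signed and types[1] the unsigned variant
	     of the result precision, so indexing by uns0_p and uns1_p
	     converts each operand to the variant that matches its own
	     signedness.  */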
	  arg0 = fold_convert_loc (loc, types[uns0_p], arg0);
	  arg1 = fold_convert_loc (loc, types[uns1_p], arg1);
	  if (code != MULT_EXPR)
	    expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
				    uns0_p, uns1_p, false);
	  else
	    expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
				 uns0_p, uns1_p, false);
	  return;
	}

      /* Retry with a wider type.  */
      if (orig_precres == precres)
	{
	  int p = MAX (prec0, prec1);
	  machine_mode m = smallest_mode_for_size (p, MODE_INT);
	  tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
							uns0_p && uns1_p
							&& unsr_p);
	  p = TYPE_PRECISION (optype);
	  if (p > precres)
	    {
	      precres = p;
	      unsr_p = TYPE_UNSIGNED (optype);
	      type = optype;
	      continue;
	    }
	}

      gcc_unreachable ();
    }
  while (1);
}

/* Expand ADD_OVERFLOW STMT.  */

static void
expand_ADD_OVERFLOW (gcall *stmt)
{
  expand_arith_overflow (PLUS_EXPR, stmt);
}

/* Expand SUB_OVERFLOW STMT.  */

static void
expand_SUB_OVERFLOW (gcall *stmt)
{
  expand_arith_overflow (MINUS_EXPR, stmt);
}

/* Expand MUL_OVERFLOW STMT.  */

static void
expand_MUL_OVERFLOW (gcall *stmt)
{
  expand_arith_overflow (MULT_EXPR, stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gcall *)
{
  gcc_unreachable ();
}

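/* Expand MASK_LOAD call STMT.  */
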
static void
expand_MASK_LOAD (gcall *stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}

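/* Expand MASK_STORE call STMT.  */
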
static void
expand_MASK_STORE (gcall *stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}

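/* ABNORMAL_DISPATCHER is only a marker for abnormal control flow
   edges; nothing needs to be emitted for it.  */
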
static void
expand_ABNORMAL_DISPATCHER (gcall *)
{
}

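/* Expand BUILTIN_EXPECT call STMT.  By now the branch prediction
   hint has served its purpose, so simply evaluate the argument and
   copy it to the lhs, if any.  */
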
static void
expand_BUILTIN_EXPECT (gcall *stmt)
{
  /* When guessing was done, the hints should already have been
     stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode,
			 EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gcall *stmt)

   where STMT is the statement that performs the call.  */

static void (*const internal_fn_expanders[]) (gcall *) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gcall *stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}
