1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
24#include "machmode.h"
25#include "rtl.h"
26#include "hash-set.h"
27#include "vec.h"
28#include "double-int.h"
29#include "input.h"
30#include "alias.h"
31#include "symtab.h"
32#include "wide-int.h"
33#include "inchash.h"
34#include "tree.h"
35#include "fold-const.h"
36#include "stringpool.h"
37#include "stor-layout.h"
38#include "attribs.h"
39#include "varasm.h"
40#include "flags.h"
41#include "regs.h"
42#include "hard-reg-set.h"
43#include "except.h"
44#include "function.h"
45#include "insn-config.h"
46#include "insn-attr.h"
47#include "hashtab.h"
48#include "statistics.h"
49#include "real.h"
50#include "fixed-value.h"
51#include "expmed.h"
52#include "dojump.h"
53#include "explow.h"
54#include "calls.h"
55#include "emit-rtl.h"
56#include "stmt.h"
57/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
58#include "expr.h"
59#include "insn-codes.h"
60#include "optabs.h"
61#include "libfuncs.h"
62#include "recog.h"
63#include "reload.h"
64#include "typeclass.h"
65#include "toplev.h"
66#include "langhooks.h"
67#include "intl.h"
68#include "tm_p.h"
69#include "tree-iterator.h"
70#include "predict.h"
71#include "dominance.h"
72#include "cfg.h"
73#include "basic-block.h"
74#include "tree-ssa-alias.h"
75#include "internal-fn.h"
76#include "gimple-expr.h"
77#include "is-a.h"
78#include "gimple.h"
79#include "gimple-ssa.h"
80#include "hash-map.h"
81#include "plugin-api.h"
82#include "ipa-ref.h"
83#include "cgraph.h"
84#include "tree-ssanames.h"
85#include "target.h"
86#include "common/common-target.h"
87#include "timevar.h"
88#include "df.h"
89#include "diagnostic.h"
90#include "tree-ssa-live.h"
91#include "tree-outof-ssa.h"
92#include "target-globals.h"
93#include "params.h"
94#include "tree-ssa-address.h"
95#include "cfgexpand.h"
96#include "builtins.h"
97#include "tree-chkp.h"
98#include "rtl-chkp.h"
99#include "ccmp.h"
100
101#ifndef STACK_PUSH_CODE
102#ifdef STACK_GROWS_DOWNWARD
103#define STACK_PUSH_CODE PRE_DEC
104#else
105#define STACK_PUSH_CODE PRE_INC
106#endif
107#endif
108
109
110/* If this is nonzero, we do not bother generating VOLATILE
111   around volatile memory references, and we are willing to
112   output indirect addresses.  If cse is to follow, we reject
113   indirect addresses so a useful potential cse is generated;
114   if it is used only once, instruction combination will produce
115   the same indirect address eventually.  */
116int cse_not_expected;
117
118/* This structure is used by move_by_pieces to describe the move to
119   be performed.  */
120struct move_by_pieces_d
121{
122  rtx to;
123  rtx to_addr;
124  int autinc_to;
125  int explicit_inc_to;
126  rtx from;
127  rtx from_addr;
128  int autinc_from;
129  int explicit_inc_from;
130  unsigned HOST_WIDE_INT len;
131  HOST_WIDE_INT offset;
132  int reverse;
133};
134
135/* This structure is used by store_by_pieces to describe the clear to
136   be performed.  */
137
138struct store_by_pieces_d
139{
140  rtx to;
141  rtx to_addr;
142  int autinc_to;
143  int explicit_inc_to;
144  unsigned HOST_WIDE_INT len;
145  HOST_WIDE_INT offset;
146  rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
147  void *constfundata;
148  int reverse;
149};
150
151static void move_by_pieces_1 (insn_gen_fn, machine_mode,
152			      struct move_by_pieces_d *);
153static bool block_move_libcall_safe_for_call_parm (void);
154static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
155					unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
156					unsigned HOST_WIDE_INT);
157static tree emit_block_move_libcall_fn (int);
158static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
159static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
160static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
161static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
162static void store_by_pieces_2 (insn_gen_fn, machine_mode,
163			       struct store_by_pieces_d *);
164static tree clear_storage_libcall_fn (int);
165static rtx_insn *compress_float_constant (rtx, rtx);
166static rtx get_subtarget (rtx);
167static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
168				     HOST_WIDE_INT, machine_mode,
169				     tree, int, alias_set_type);
170static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
171static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
172			unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
173			machine_mode, tree, alias_set_type, bool);
174
175static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
176
177static int is_aligning_offset (const_tree, const_tree);
178static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
179static rtx do_store_flag (sepops, rtx, machine_mode);
180#ifdef PUSH_ROUNDING
181static void emit_single_push_insn (machine_mode, rtx, tree);
182#endif
183static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
184static rtx const_vector_from_tree (tree);
185static tree tree_expr_size (const_tree);
186static HOST_WIDE_INT int_expr_size (tree);
187
188
189/* This is run to set up which modes can be used
190   directly in memory and to initialize the block move optab.  It is run
191   at the beginning of compilation and when the target is reinitialized.  */
192
193void
194init_expr_target (void)
195{
196  rtx insn, pat;
197  machine_mode mode;
198  int num_clobbers;
199  rtx mem, mem1;
200  rtx reg;
201
202  /* Try indexing by frame ptr and try by stack ptr.
203     It is known that on the Convex the stack ptr isn't a valid index.
204     With luck, one or the other is valid on any machine.  */
205  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
206  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
207
208  /* A scratch register we can modify in-place below to avoid
209     useless RTL allocations.  */
210  reg = gen_rtx_REG (VOIDmode, -1);
211
212  insn = rtx_alloc (INSN);
213  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
214  PATTERN (insn) = pat;
215
216  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
217       mode = (machine_mode) ((int) mode + 1))
218    {
219      int regno;
220
221      direct_load[(int) mode] = direct_store[(int) mode] = 0;
222      PUT_MODE (mem, mode);
223      PUT_MODE (mem1, mode);
224      PUT_MODE (reg, mode);
225
226      /* See if there is some register that can be used in this mode and
227	 directly loaded or stored from memory.  */
228
229      if (mode != VOIDmode && mode != BLKmode)
230	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
231	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
232	     regno++)
233	  {
234	    if (! HARD_REGNO_MODE_OK (regno, mode))
235	      continue;
236
237	    SET_REGNO (reg, regno);
238
239	    SET_SRC (pat) = mem;
240	    SET_DEST (pat) = reg;
241	    if (recog (pat, insn, &num_clobbers) >= 0)
242	      direct_load[(int) mode] = 1;
243
244	    SET_SRC (pat) = mem1;
245	    SET_DEST (pat) = reg;
246	    if (recog (pat, insn, &num_clobbers) >= 0)
247	      direct_load[(int) mode] = 1;
248
249	    SET_SRC (pat) = reg;
250	    SET_DEST (pat) = mem;
251	    if (recog (pat, insn, &num_clobbers) >= 0)
252	      direct_store[(int) mode] = 1;
253
254	    SET_SRC (pat) = reg;
255	    SET_DEST (pat) = mem1;
256	    if (recog (pat, insn, &num_clobbers) >= 0)
257	      direct_store[(int) mode] = 1;
258	  }
259    }
260
261  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
262
263  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
264       mode = GET_MODE_WIDER_MODE (mode))
265    {
266      machine_mode srcmode;
267      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
268	   srcmode = GET_MODE_WIDER_MODE (srcmode))
269	{
270	  enum insn_code ic;
271
272	  ic = can_extend_p (mode, srcmode, 0);
273	  if (ic == CODE_FOR_nothing)
274	    continue;
275
276	  PUT_MODE (mem, srcmode);
277
278	  if (insn_operand_matches (ic, 1, mem))
279	    float_extend_from_mem[mode][srcmode] = true;
280	}
281    }
282}
283
284/* This is run at the start of compiling a function.  */
285
286void
287init_expr (void)
288{
289  memset (&crtl->expr, 0, sizeof (crtl->expr));
290}
291
292/* Copy data from FROM to TO, where the machine modes are not the same.
293   Both modes may be integer, or both may be floating, or both may be
294   fixed-point.
295   UNSIGNEDP should be nonzero if FROM is an unsigned type.
296   This causes zero-extension instead of sign-extension.  */
297
298void
299convert_move (rtx to, rtx from, int unsignedp)
300{
301  machine_mode to_mode = GET_MODE (to);
302  machine_mode from_mode = GET_MODE (from);
303  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
304  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
305  enum insn_code code;
306  rtx libcall;
307
308  /* rtx code for making an equivalent value.  */
309  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
310			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
311
312
313  gcc_assert (to_real == from_real);
314  gcc_assert (to_mode != BLKmode);
315  gcc_assert (from_mode != BLKmode);
316
317  /* If the source and destination are already the same, then there's
318     nothing to do.  */
319  if (to == from)
320    return;
321
322  /* If FROM is a SUBREG that indicates that we have already done at least
323     the required extension, strip it.  We don't handle such SUBREGs as
324     TO here.  */
325
326  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
327      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
328	  >= GET_MODE_PRECISION (to_mode))
329      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
330    from = gen_lowpart (to_mode, from), from_mode = to_mode;
331
332  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
333
334  if (to_mode == from_mode
335      || (from_mode == VOIDmode && CONSTANT_P (from)))
336    {
337      emit_move_insn (to, from);
338      return;
339    }
340
341  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
342    {
343      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
344
345      if (VECTOR_MODE_P (to_mode))
346	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
347      else
348	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
349
350      emit_move_insn (to, from);
351      return;
352    }
353
354  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
355    {
356      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
357      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
358      return;
359    }
360
361  if (to_real)
362    {
363      rtx value;
364      rtx_insn *insns;
365      convert_optab tab;
366
367      gcc_assert ((GET_MODE_PRECISION (from_mode)
368		   != GET_MODE_PRECISION (to_mode))
369		  || (DECIMAL_FLOAT_MODE_P (from_mode)
370		      != DECIMAL_FLOAT_MODE_P (to_mode)));
371
372      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
373	/* Conversion between decimal float and binary float, same size.  */
374	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
375      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
376	tab = sext_optab;
377      else
378	tab = trunc_optab;
379
380      /* Try converting directly if the insn is supported.  */
381
382      code = convert_optab_handler (tab, to_mode, from_mode);
383      if (code != CODE_FOR_nothing)
384	{
385	  emit_unop_insn (code, to, from,
386			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
387	  return;
388	}
389
390      /* Otherwise use a libcall.  */
391      libcall = convert_optab_libfunc (tab, to_mode, from_mode);
392
393      /* Is this conversion implemented yet?  */
394      gcc_assert (libcall);
395
396      start_sequence ();
397      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
398				       1, from, from_mode);
399      insns = get_insns ();
400      end_sequence ();
401      emit_libcall_block (insns, to, value,
402			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
403								       from)
404			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
405      return;
406    }
407
408  /* Handle pointer conversion.  */			/* SPEE 900220.  */
409  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
410  {
411    convert_optab ctab;
412
413    if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
414      ctab = trunc_optab;
415    else if (unsignedp)
416      ctab = zext_optab;
417    else
418      ctab = sext_optab;
419
420    if (convert_optab_handler (ctab, to_mode, from_mode)
421	!= CODE_FOR_nothing)
422      {
423	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
424			to, from, UNKNOWN);
425	return;
426      }
427  }
428
429  /* Targets are expected to provide conversion insns between PxImode and
430     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
431  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
432    {
433      machine_mode full_mode
434	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
435
436      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
437		  != CODE_FOR_nothing);
438
439      if (full_mode != from_mode)
440	from = convert_to_mode (full_mode, from, unsignedp);
441      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
442		      to, from, UNKNOWN);
443      return;
444    }
445  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
446    {
447      rtx new_from;
448      machine_mode full_mode
449	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
450      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
451      enum insn_code icode;
452
453      icode = convert_optab_handler (ctab, full_mode, from_mode);
454      gcc_assert (icode != CODE_FOR_nothing);
455
456      if (to_mode == full_mode)
457	{
458	  emit_unop_insn (icode, to, from, UNKNOWN);
459	  return;
460	}
461
462      new_from = gen_reg_rtx (full_mode);
463      emit_unop_insn (icode, new_from, from, UNKNOWN);
464
465      /* else proceed to integer conversions below.  */
466      from_mode = full_mode;
467      from = new_from;
468    }
469
470   /* Make sure both are fixed-point modes or both are not.  */
471   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
472	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
473   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
474    {
475      /* If we widen from_mode to to_mode and they are in the same class,
476	 we won't saturate the result.
477	 Otherwise, always saturate the result to play safe.  */
478      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
479	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
480	expand_fixed_convert (to, from, 0, 0);
481      else
482	expand_fixed_convert (to, from, 0, 1);
483      return;
484    }
485
486  /* Now both modes are integers.  */
487
488  /* Handle expanding beyond a word.  */
489  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
490      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
491    {
492      rtx_insn *insns;
493      rtx lowpart;
494      rtx fill_value;
495      rtx lowfrom;
496      int i;
497      machine_mode lowpart_mode;
498      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
499
500      /* Try converting directly if the insn is supported.  */
501      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
502	  != CODE_FOR_nothing)
503	{
504	  /* If FROM is a SUBREG, put it into a register.  Do this
505	     so that we always generate the same set of insns for
506	     better cse'ing; if an intermediate assignment occurred,
507	     we won't be doing the operation directly on the SUBREG.  */
508	  if (optimize > 0 && GET_CODE (from) == SUBREG)
509	    from = force_reg (from_mode, from);
510	  emit_unop_insn (code, to, from, equiv_code);
511	  return;
512	}
513      /* Next, try converting via full word.  */
514      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
515	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
516		   != CODE_FOR_nothing))
517	{
518	  rtx word_to = gen_reg_rtx (word_mode);
519	  if (REG_P (to))
520	    {
521	      if (reg_overlap_mentioned_p (to, from))
522		from = force_reg (from_mode, from);
523	      emit_clobber (to);
524	    }
525	  convert_move (word_to, from, unsignedp);
526	  emit_unop_insn (code, to, word_to, equiv_code);
527	  return;
528	}
529
530      /* No special multiword conversion insn; do it by hand.  */
531      start_sequence ();
532
533      /* Since we will turn this into a no conflict block, we must ensure the
534         the source does not overlap the target so force it into an isolated
535         register when maybe so.  Likewise for any MEM input, since the
536         conversion sequence might require several references to it and we
537         must ensure we're getting the same value every time.  */
538
539      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
540	from = force_reg (from_mode, from);
541
542      /* Get a copy of FROM widened to a word, if necessary.  */
543      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
544	lowpart_mode = word_mode;
545      else
546	lowpart_mode = from_mode;
547
548      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
549
550      lowpart = gen_lowpart (lowpart_mode, to);
551      emit_move_insn (lowpart, lowfrom);
552
553      /* Compute the value to put in each remaining word.  */
554      if (unsignedp)
555	fill_value = const0_rtx;
556      else
557	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
558					    LT, lowfrom, const0_rtx,
559					    lowpart_mode, 0, -1);
560
561      /* Fill the remaining words.  */
562      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
563	{
564	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
565	  rtx subword = operand_subword (to, index, 1, to_mode);
566
567	  gcc_assert (subword);
568
569	  if (fill_value != subword)
570	    emit_move_insn (subword, fill_value);
571	}
572
573      insns = get_insns ();
574      end_sequence ();
575
576      emit_insn (insns);
577      return;
578    }
579
580  /* Truncating multi-word to a word or less.  */
581  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
582      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
583    {
584      if (!((MEM_P (from)
585	     && ! MEM_VOLATILE_P (from)
586	     && direct_load[(int) to_mode]
587	     && ! mode_dependent_address_p (XEXP (from, 0),
588					    MEM_ADDR_SPACE (from)))
589	    || REG_P (from)
590	    || GET_CODE (from) == SUBREG))
591	from = force_reg (from_mode, from);
592      convert_move (to, gen_lowpart (word_mode, from), 0);
593      return;
594    }
595
596  /* Now follow all the conversions between integers
597     no more than a word long.  */
598
599  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
600  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
601      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
602    {
603      if (!((MEM_P (from)
604	     && ! MEM_VOLATILE_P (from)
605	     && direct_load[(int) to_mode]
606	     && ! mode_dependent_address_p (XEXP (from, 0),
607					    MEM_ADDR_SPACE (from)))
608	    || REG_P (from)
609	    || GET_CODE (from) == SUBREG))
610	from = force_reg (from_mode, from);
611      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
612	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
613	from = copy_to_reg (from);
614      emit_move_insn (to, gen_lowpart (to_mode, from));
615      return;
616    }
617
618  /* Handle extension.  */
619  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
620    {
621      /* Convert directly if that works.  */
622      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
623	  != CODE_FOR_nothing)
624	{
625	  emit_unop_insn (code, to, from, equiv_code);
626	  return;
627	}
628      else
629	{
630	  machine_mode intermediate;
631	  rtx tmp;
632	  int shift_amount;
633
634	  /* Search for a mode to convert via.  */
635	  for (intermediate = from_mode; intermediate != VOIDmode;
636	       intermediate = GET_MODE_WIDER_MODE (intermediate))
637	    if (((can_extend_p (to_mode, intermediate, unsignedp)
638		  != CODE_FOR_nothing)
639		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
640		     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
641		&& (can_extend_p (intermediate, from_mode, unsignedp)
642		    != CODE_FOR_nothing))
643	      {
644		convert_move (to, convert_to_mode (intermediate, from,
645						   unsignedp), unsignedp);
646		return;
647	      }
648
649	  /* No suitable intermediate mode.
650	     Generate what we need with	shifts.  */
651	  shift_amount = (GET_MODE_PRECISION (to_mode)
652			  - GET_MODE_PRECISION (from_mode));
653	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
654	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
655			      to, unsignedp);
656	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
657			      to, unsignedp);
658	  if (tmp != to)
659	    emit_move_insn (to, tmp);
660	  return;
661	}
662    }
663
664  /* Support special truncate insns for certain modes.  */
665  if (convert_optab_handler (trunc_optab, to_mode,
666			     from_mode) != CODE_FOR_nothing)
667    {
668      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
669		      to, from, UNKNOWN);
670      return;
671    }
672
673  /* Handle truncation of volatile memrefs, and so on;
674     the things that couldn't be truncated directly,
675     and for which there was no special instruction.
676
677     ??? Code above formerly short-circuited this, for most integer
678     mode pairs, with a force_reg in from_mode followed by a recursive
679     call to this routine.  Appears always to have been wrong.  */
680  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
681    {
682      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
683      emit_move_insn (to, temp);
684      return;
685    }
686
687  /* Mode combination is not recognized.  */
688  gcc_unreachable ();
689}
690
691/* Return an rtx for a value that would result
692   from converting X to mode MODE.
693   Both X and MODE may be floating, or both integer.
694   UNSIGNEDP is nonzero if X is an unsigned value.
695   This can be done by referring to a part of X in place
696   or by copying to a new temporary with conversion.  */
697
698rtx
699convert_to_mode (machine_mode mode, rtx x, int unsignedp)
700{
701  return convert_modes (mode, VOIDmode, x, unsignedp);
702}
703
704/* Return an rtx for a value that would result
705   from converting X from mode OLDMODE to mode MODE.
706   Both modes may be floating, or both integer.
707   UNSIGNEDP is nonzero if X is an unsigned value.
708
709   This can be done by referring to a part of X in place
710   or by copying to a new temporary with conversion.
711
712   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
713
714rtx
715convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
716{
717  rtx temp;
718
719  /* If FROM is a SUBREG that indicates that we have already done at least
720     the required extension, strip it.  */
721
722  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
723      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
724      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
725    x = gen_lowpart (mode, SUBREG_REG (x));
726
727  if (GET_MODE (x) != VOIDmode)
728    oldmode = GET_MODE (x);
729
730  if (mode == oldmode)
731    return x;
732
733  if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
734    {
735      /* If the caller did not tell us the old mode, then there is not
736	 much to do with respect to canonicalization.  We have to
737	 assume that all the bits are significant.  */
738      if (GET_MODE_CLASS (oldmode) != MODE_INT)
739	oldmode = MAX_MODE_INT;
740      wide_int w = wide_int::from (std::make_pair (x, oldmode),
741				   GET_MODE_PRECISION (mode),
742				   unsignedp ? UNSIGNED : SIGNED);
743      return immed_wide_int_const (w, mode);
744    }
745
746  /* We can do this with a gen_lowpart if both desired and current modes
747     are integer, and this is either a constant integer, a register, or a
748     non-volatile MEM. */
749  if (GET_MODE_CLASS (mode) == MODE_INT
750      && GET_MODE_CLASS (oldmode) == MODE_INT
751      && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
752      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
753          || (REG_P (x)
754              && (!HARD_REGISTER_P (x)
755                  || HARD_REGNO_MODE_OK (REGNO (x), mode))
756              && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
757
758   return gen_lowpart (mode, x);
759
760  /* Converting from integer constant into mode is always equivalent to an
761     subreg operation.  */
762  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
763    {
764      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
765      return simplify_gen_subreg (mode, x, oldmode, 0);
766    }
767
768  temp = gen_reg_rtx (mode);
769  convert_move (temp, x, unsignedp);
770  return temp;
771}
772
773/* Return the largest alignment we can use for doing a move (or store)
774   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */
775
776static unsigned int
777alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
778{
779  machine_mode tmode;
780
781  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
782  if (align >= GET_MODE_ALIGNMENT (tmode))
783    align = GET_MODE_ALIGNMENT (tmode);
784  else
785    {
786      machine_mode tmode, xmode;
787
788      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
789	   tmode != VOIDmode;
790	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
791	if (GET_MODE_SIZE (tmode) > max_pieces
792	    || SLOW_UNALIGNED_ACCESS (tmode, align))
793	  break;
794
795      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
796    }
797
798  return align;
799}
800
801/* Return the widest integer mode no wider than SIZE.  If no such mode
802   can be found, return VOIDmode.  */
803
804static machine_mode
805widest_int_mode_for_size (unsigned int size)
806{
807  machine_mode tmode, mode = VOIDmode;
808
809  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
810       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
811    if (GET_MODE_SIZE (tmode) < size)
812      mode = tmode;
813
814  return mode;
815}
816
817/* Determine whether the LEN bytes can be moved by using several move
818   instructions.  Return nonzero if a call to move_by_pieces should
819   succeed.  */
820
821int
822can_move_by_pieces (unsigned HOST_WIDE_INT len,
823		    unsigned int align)
824{
825  return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
826						 optimize_insn_for_speed_p ());
827}
828
829/* Generate several move instructions to copy LEN bytes from block FROM to
830   block TO.  (These are MEM rtx's with BLKmode).
831
832   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
833   used to push FROM to the stack.
834
835   ALIGN is maximum stack alignment we can assume.
836
837   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
838   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
839   stpcpy.  */
840
841rtx
842move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
843		unsigned int align, int endp)
844{
845  struct move_by_pieces_d data;
846  machine_mode to_addr_mode;
847  machine_mode from_addr_mode = get_address_mode (from);
848  rtx to_addr, from_addr = XEXP (from, 0);
849  unsigned int max_size = MOVE_MAX_PIECES + 1;
850  enum insn_code icode;
851
852  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
853
854  data.offset = 0;
855  data.from_addr = from_addr;
856  if (to)
857    {
858      to_addr_mode = get_address_mode (to);
859      to_addr = XEXP (to, 0);
860      data.to = to;
861      data.autinc_to
862	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
863	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
864      data.reverse
865	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
866    }
867  else
868    {
869      to_addr_mode = VOIDmode;
870      to_addr = NULL_RTX;
871      data.to = NULL_RTX;
872      data.autinc_to = 1;
873#ifdef STACK_GROWS_DOWNWARD
874      data.reverse = 1;
875#else
876      data.reverse = 0;
877#endif
878    }
879  data.to_addr = to_addr;
880  data.from = from;
881  data.autinc_from
882    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
883       || GET_CODE (from_addr) == POST_INC
884       || GET_CODE (from_addr) == POST_DEC);
885
886  data.explicit_inc_from = 0;
887  data.explicit_inc_to = 0;
888  if (data.reverse) data.offset = len;
889  data.len = len;
890
891  /* If copying requires more than two move insns,
892     copy addresses to registers (to make displacements shorter)
893     and use post-increment if available.  */
894  if (!(data.autinc_from && data.autinc_to)
895      && move_by_pieces_ninsns (len, align, max_size) > 2)
896    {
897      /* Find the mode of the largest move...
898	 MODE might not be used depending on the definitions of the
899	 USE_* macros below.  */
900      machine_mode mode ATTRIBUTE_UNUSED
901	= widest_int_mode_for_size (max_size);
902
903      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
904	{
905	  data.from_addr = copy_to_mode_reg (from_addr_mode,
906					     plus_constant (from_addr_mode,
907							    from_addr, len));
908	  data.autinc_from = 1;
909	  data.explicit_inc_from = -1;
910	}
911      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
912	{
913	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
914	  data.autinc_from = 1;
915	  data.explicit_inc_from = 1;
916	}
917      if (!data.autinc_from && CONSTANT_P (from_addr))
918	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
919      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
920	{
921	  data.to_addr = copy_to_mode_reg (to_addr_mode,
922					   plus_constant (to_addr_mode,
923							  to_addr, len));
924	  data.autinc_to = 1;
925	  data.explicit_inc_to = -1;
926	}
927      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
928	{
929	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
930	  data.autinc_to = 1;
931	  data.explicit_inc_to = 1;
932	}
933      if (!data.autinc_to && CONSTANT_P (to_addr))
934	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
935    }
936
937  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
938
939  /* First move what we can in the largest integer mode, then go to
940     successively smaller modes.  */
941
942  while (max_size > 1 && data.len > 0)
943    {
944      machine_mode mode = widest_int_mode_for_size (max_size);
945
946      if (mode == VOIDmode)
947	break;
948
949      icode = optab_handler (mov_optab, mode);
950      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
951	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
952
953      max_size = GET_MODE_SIZE (mode);
954    }
955
956  /* The code above should have handled everything.  */
957  gcc_assert (!data.len);
958
959  if (endp)
960    {
961      rtx to1;
962
963      gcc_assert (!data.reverse);
964      if (data.autinc_to)
965	{
966	  if (endp == 2)
967	    {
968	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
969		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
970	      else
971		data.to_addr = copy_to_mode_reg (to_addr_mode,
972						 plus_constant (to_addr_mode,
973								data.to_addr,
974								-1));
975	    }
976	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
977					   data.offset);
978	}
979      else
980	{
981	  if (endp == 2)
982	    --data.offset;
983	  to1 = adjust_address (data.to, QImode, data.offset);
984	}
985      return to1;
986    }
987  else
988    return data.to;
989}
990
991/* Return number of insns required to move L bytes by pieces.
992   ALIGN (in bits) is maximum alignment we can assume.  */
993
994unsigned HOST_WIDE_INT
995move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
996		       unsigned int max_size)
997{
998  unsigned HOST_WIDE_INT n_insns = 0;
999
1000  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1001
1002  while (max_size > 1 && l > 0)
1003    {
1004      machine_mode mode;
1005      enum insn_code icode;
1006
1007      mode = widest_int_mode_for_size (max_size);
1008
1009      if (mode == VOIDmode)
1010	break;
1011
1012      icode = optab_handler (mov_optab, mode);
1013      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1014	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1015
1016      max_size = GET_MODE_SIZE (mode);
1017    }
1018
1019  gcc_assert (!l);
1020  return n_insns;
1021}
1022
1023/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1024   with move instructions for mode MODE.  GENFUN is the gen_... function
1025   to make a move insn for that mode.  DATA has all the other info.  */
1026
1027static void
1028move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1029		  struct move_by_pieces_d *data)
1030{
1031  unsigned int size = GET_MODE_SIZE (mode);
1032  rtx to1 = NULL_RTX, from1;
1033
1034  while (data->len >= size)
1035    {
1036      if (data->reverse)
1037	data->offset -= size;
1038
1039      if (data->to)
1040	{
1041	  if (data->autinc_to)
1042	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1043					     data->offset);
1044	  else
1045	    to1 = adjust_address (data->to, mode, data->offset);
1046	}
1047
1048      if (data->autinc_from)
1049	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1050					   data->offset);
1051      else
1052	from1 = adjust_address (data->from, mode, data->offset);
1053
1054      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1055	emit_insn (gen_add2_insn (data->to_addr,
1056				  gen_int_mode (-(HOST_WIDE_INT) size,
1057						GET_MODE (data->to_addr))));
1058      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1059	emit_insn (gen_add2_insn (data->from_addr,
1060				  gen_int_mode (-(HOST_WIDE_INT) size,
1061						GET_MODE (data->from_addr))));
1062
1063      if (data->to)
1064	emit_insn ((*genfun) (to1, from1));
1065      else
1066	{
1067#ifdef PUSH_ROUNDING
1068	  emit_single_push_insn (mode, from1, NULL);
1069#else
1070	  gcc_unreachable ();
1071#endif
1072	}
1073
1074      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1075	emit_insn (gen_add2_insn (data->to_addr,
1076				  gen_int_mode (size,
1077						GET_MODE (data->to_addr))));
1078      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1079	emit_insn (gen_add2_insn (data->from_addr,
1080				  gen_int_mode (size,
1081						GET_MODE (data->from_addr))));
1082
1083      if (! data->reverse)
1084	data->offset += size;
1085
1086      data->len -= size;
1087    }
1088}
1089
1090/* Emit code to move a block Y to a block X.  This may be done with
1091   string-move instructions, with multiple scalar move instructions,
1092   or with a library call.
1093
1094   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1095   SIZE is an rtx that says how long they are.
1096   ALIGN is the maximum alignment we can assume they have.
1097   METHOD describes what kind of copy this is, and what mechanisms may be used.
1098   MIN_SIZE is the minimal size of block to move
1099   MAX_SIZE is the maximal size of block to move, if it can not be represented
1100   in unsigned HOST_WIDE_INT, than it is mask of all ones.
1101
1102   Return the address of the new block, if memcpy is called and returns it,
1103   0 otherwise.  */
1104
1105rtx
1106emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1107		       unsigned int expected_align, HOST_WIDE_INT expected_size,
1108		       unsigned HOST_WIDE_INT min_size,
1109		       unsigned HOST_WIDE_INT max_size,
1110		       unsigned HOST_WIDE_INT probable_max_size)
1111{
1112  bool may_use_call;
1113  rtx retval = 0;
1114  unsigned int align;
1115
1116  gcc_assert (size);
1117  if (CONST_INT_P (size)
1118      && INTVAL (size) == 0)
1119    return 0;
1120
1121  switch (method)
1122    {
1123    case BLOCK_OP_NORMAL:
1124    case BLOCK_OP_TAILCALL:
1125      may_use_call = true;
1126      break;
1127
1128    case BLOCK_OP_CALL_PARM:
1129      may_use_call = block_move_libcall_safe_for_call_parm ();
1130
1131      /* Make inhibit_defer_pop nonzero around the library call
1132	 to force it to pop the arguments right away.  */
1133      NO_DEFER_POP;
1134      break;
1135
1136    case BLOCK_OP_NO_LIBCALL:
1137      may_use_call = false;
1138      break;
1139
1140    default:
1141      gcc_unreachable ();
1142    }
1143
1144  gcc_assert (MEM_P (x) && MEM_P (y));
1145  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1146  gcc_assert (align >= BITS_PER_UNIT);
1147
1148  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1149     block copy is more efficient for other large modes, e.g. DCmode.  */
1150  x = adjust_address (x, BLKmode, 0);
1151  y = adjust_address (y, BLKmode, 0);
1152
1153  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1154     can be incorrect is coming from __builtin_memcpy.  */
1155  if (CONST_INT_P (size))
1156    {
1157      x = shallow_copy_rtx (x);
1158      y = shallow_copy_rtx (y);
1159      set_mem_size (x, INTVAL (size));
1160      set_mem_size (y, INTVAL (size));
1161    }
1162
1163  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1164    move_by_pieces (x, y, INTVAL (size), align, 0);
1165  else if (emit_block_move_via_movmem (x, y, size, align,
1166				       expected_align, expected_size,
1167				       min_size, max_size, probable_max_size))
1168    ;
1169  else if (may_use_call
1170	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1171	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1172    {
1173      /* Since x and y are passed to a libcall, mark the corresponding
1174	 tree EXPR as addressable.  */
1175      tree y_expr = MEM_EXPR (y);
1176      tree x_expr = MEM_EXPR (x);
1177      if (y_expr)
1178	mark_addressable (y_expr);
1179      if (x_expr)
1180	mark_addressable (x_expr);
1181      retval = emit_block_move_via_libcall (x, y, size,
1182					    method == BLOCK_OP_TAILCALL);
1183    }
1184
1185  else
1186    emit_block_move_via_loop (x, y, size, align);
1187
1188  if (method == BLOCK_OP_CALL_PARM)
1189    OK_DEFER_POP;
1190
1191  return retval;
1192}
1193
1194rtx
1195emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1196{
1197  unsigned HOST_WIDE_INT max, min = 0;
1198  if (GET_CODE (size) == CONST_INT)
1199    min = max = UINTVAL (size);
1200  else
1201    max = GET_MODE_MASK (GET_MODE (size));
1202  return emit_block_move_hints (x, y, size, method, 0, -1,
1203				min, max, max);
1204}
1205
1206/* A subroutine of emit_block_move.  Returns true if calling the
1207   block move libcall will not clobber any parameters which may have
1208   already been placed on the stack.  */
1209
1210static bool
1211block_move_libcall_safe_for_call_parm (void)
1212{
1213#if defined (REG_PARM_STACK_SPACE)
1214  tree fn;
1215#endif
1216
1217  /* If arguments are pushed on the stack, then they're safe.  */
1218  if (PUSH_ARGS)
1219    return true;
1220
1221  /* If registers go on the stack anyway, any argument is sure to clobber
1222     an outgoing argument.  */
1223#if defined (REG_PARM_STACK_SPACE)
1224  fn = emit_block_move_libcall_fn (false);
1225  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1226     depend on its argument.  */
1227  (void) fn;
1228  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1229      && REG_PARM_STACK_SPACE (fn) != 0)
1230    return false;
1231#endif
1232
1233  /* If any argument goes in memory, then it might clobber an outgoing
1234     argument.  */
1235  {
1236    CUMULATIVE_ARGS args_so_far_v;
1237    cumulative_args_t args_so_far;
1238    tree fn, arg;
1239
1240    fn = emit_block_move_libcall_fn (false);
1241    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1242    args_so_far = pack_cumulative_args (&args_so_far_v);
1243
1244    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1245    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1246      {
1247	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1248	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1249					      NULL_TREE, true);
1250	if (!tmp || !REG_P (tmp))
1251	  return false;
1252	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1253	  return false;
1254	targetm.calls.function_arg_advance (args_so_far, mode,
1255					    NULL_TREE, true);
1256      }
1257  }
1258  return true;
1259}
1260
1261/* A subroutine of emit_block_move.  Expand a movmem pattern;
1262   return true if successful.  */
1263
1264static bool
1265emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1266			    unsigned int expected_align, HOST_WIDE_INT expected_size,
1267			    unsigned HOST_WIDE_INT min_size,
1268			    unsigned HOST_WIDE_INT max_size,
1269			    unsigned HOST_WIDE_INT probable_max_size)
1270{
1271  int save_volatile_ok = volatile_ok;
1272  machine_mode mode;
1273
1274  if (expected_align < align)
1275    expected_align = align;
1276  if (expected_size != -1)
1277    {
1278      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1279	expected_size = probable_max_size;
1280      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1281	expected_size = min_size;
1282    }
1283
1284  /* Since this is a move insn, we don't care about volatility.  */
1285  volatile_ok = 1;
1286
1287  /* Try the most limited insn first, because there's no point
1288     including more than one in the machine description unless
1289     the more limited one has some advantage.  */
1290
1291  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1292       mode = GET_MODE_WIDER_MODE (mode))
1293    {
1294      enum insn_code code = direct_optab_handler (movmem_optab, mode);
1295
1296      if (code != CODE_FOR_nothing
1297	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1298	     here because if SIZE is less than the mode mask, as it is
1299	     returned by the macro, it will definitely be less than the
1300	     actual mode mask.  Since SIZE is within the Pmode address
1301	     space, we limit MODE to Pmode.  */
1302	  && ((CONST_INT_P (size)
1303	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1304		   <= (GET_MODE_MASK (mode) >> 1)))
1305	      || max_size <= (GET_MODE_MASK (mode) >> 1)
1306	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1307	{
1308	  struct expand_operand ops[9];
1309	  unsigned int nops;
1310
1311	  /* ??? When called via emit_block_move_for_call, it'd be
1312	     nice if there were some way to inform the backend, so
1313	     that it doesn't fail the expansion because it thinks
1314	     emitting the libcall would be more efficient.  */
1315	  nops = insn_data[(int) code].n_generator_args;
1316	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1317
1318	  create_fixed_operand (&ops[0], x);
1319	  create_fixed_operand (&ops[1], y);
1320	  /* The check above guarantees that this size conversion is valid.  */
1321	  create_convert_operand_to (&ops[2], size, mode, true);
1322	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1323	  if (nops >= 6)
1324	    {
1325	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1326	      create_integer_operand (&ops[5], expected_size);
1327	    }
1328	  if (nops >= 8)
1329	    {
1330	      create_integer_operand (&ops[6], min_size);
1331	      /* If we can not represent the maximal size,
1332		 make parameter NULL.  */
1333	      if ((HOST_WIDE_INT) max_size != -1)
1334	        create_integer_operand (&ops[7], max_size);
1335	      else
1336		create_fixed_operand (&ops[7], NULL);
1337	    }
1338	  if (nops == 9)
1339	    {
1340	      /* If we can not represent the maximal size,
1341		 make parameter NULL.  */
1342	      if ((HOST_WIDE_INT) probable_max_size != -1)
1343	        create_integer_operand (&ops[8], probable_max_size);
1344	      else
1345		create_fixed_operand (&ops[8], NULL);
1346	    }
1347	  if (maybe_expand_insn (code, nops, ops))
1348	    {
1349	      volatile_ok = save_volatile_ok;
1350	      return true;
1351	    }
1352	}
1353    }
1354
1355  volatile_ok = save_volatile_ok;
1356  return false;
1357}
1358
1359/* A subroutine of emit_block_move.  Expand a call to memcpy.
1360   Return the return value from memcpy, 0 otherwise.  */
1361
1362rtx
1363emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1364{
1365  rtx dst_addr, src_addr;
1366  tree call_expr, fn, src_tree, dst_tree, size_tree;
1367  machine_mode size_mode;
1368  rtx retval;
1369
1370  /* Emit code to copy the addresses of DST and SRC and SIZE into new
1371     pseudos.  We can then place those new pseudos into a VAR_DECL and
1372     use them later.  */
1373
1374  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1375  src_addr = copy_addr_to_reg (XEXP (src, 0));
1376
1377  dst_addr = convert_memory_address (ptr_mode, dst_addr);
1378  src_addr = convert_memory_address (ptr_mode, src_addr);
1379
1380  dst_tree = make_tree (ptr_type_node, dst_addr);
1381  src_tree = make_tree (ptr_type_node, src_addr);
1382
1383  size_mode = TYPE_MODE (sizetype);
1384
1385  size = convert_to_mode (size_mode, size, 1);
1386  size = copy_to_mode_reg (size_mode, size);
1387
1388  /* It is incorrect to use the libcall calling conventions to call
1389     memcpy in this context.  This could be a user call to memcpy and
1390     the user may wish to examine the return value from memcpy.  For
1391     targets where libcalls and normal calls have different conventions
1392     for returning pointers, we could end up generating incorrect code.  */
1393
1394  size_tree = make_tree (sizetype, size);
1395
1396  fn = emit_block_move_libcall_fn (true);
1397  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1398  CALL_EXPR_TAILCALL (call_expr) = tailcall;
1399
1400  retval = expand_normal (call_expr);
1401
1402  return retval;
1403}
1404
1405/* A subroutine of emit_block_move_via_libcall.  Create the tree node
1406   for the function we use for block copies.  */
1407
1408static GTY(()) tree block_move_fn;
1409
1410void
1411init_block_move_fn (const char *asmspec)
1412{
1413  if (!block_move_fn)
1414    {
1415      tree args, fn, attrs, attr_args;
1416
1417      fn = get_identifier ("memcpy");
1418      args = build_function_type_list (ptr_type_node, ptr_type_node,
1419				       const_ptr_type_node, sizetype,
1420				       NULL_TREE);
1421
1422      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1423      DECL_EXTERNAL (fn) = 1;
1424      TREE_PUBLIC (fn) = 1;
1425      DECL_ARTIFICIAL (fn) = 1;
1426      TREE_NOTHROW (fn) = 1;
1427      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1428      DECL_VISIBILITY_SPECIFIED (fn) = 1;
1429
1430      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1431      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1432
1433      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1434
1435      block_move_fn = fn;
1436    }
1437
1438  if (asmspec)
1439    set_user_assembler_name (block_move_fn, asmspec);
1440}
1441
1442static tree
1443emit_block_move_libcall_fn (int for_call)
1444{
1445  static bool emitted_extern;
1446
1447  if (!block_move_fn)
1448    init_block_move_fn (NULL);
1449
1450  if (for_call && !emitted_extern)
1451    {
1452      emitted_extern = true;
1453      make_decl_rtl (block_move_fn);
1454    }
1455
1456  return block_move_fn;
1457}
1458
1459/* A subroutine of emit_block_move.  Copy the data via an explicit
1460   loop.  This is used only when libcalls are forbidden.  */
1461/* ??? It'd be nice to copy in hunks larger than QImode.  */
1462
1463static void
1464emit_block_move_via_loop (rtx x, rtx y, rtx size,
1465			  unsigned int align ATTRIBUTE_UNUSED)
1466{
1467  rtx_code_label *cmp_label, *top_label;
1468  rtx iter, x_addr, y_addr, tmp;
1469  machine_mode x_addr_mode = get_address_mode (x);
1470  machine_mode y_addr_mode = get_address_mode (y);
1471  machine_mode iter_mode;
1472
1473  iter_mode = GET_MODE (size);
1474  if (iter_mode == VOIDmode)
1475    iter_mode = word_mode;
1476
1477  top_label = gen_label_rtx ();
1478  cmp_label = gen_label_rtx ();
1479  iter = gen_reg_rtx (iter_mode);
1480
1481  emit_move_insn (iter, const0_rtx);
1482
1483  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1484  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1485  do_pending_stack_adjust ();
1486
1487  emit_jump (cmp_label);
1488  emit_label (top_label);
1489
1490  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1491  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1492
1493  if (x_addr_mode != y_addr_mode)
1494    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1495  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1496
1497  x = change_address (x, QImode, x_addr);
1498  y = change_address (y, QImode, y_addr);
1499
1500  emit_move_insn (x, y);
1501
1502  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1503			     true, OPTAB_LIB_WIDEN);
1504  if (tmp != iter)
1505    emit_move_insn (iter, tmp);
1506
1507  emit_label (cmp_label);
1508
1509  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1510			   true, top_label, REG_BR_PROB_BASE * 90 / 100);
1511}
1512
1513/* Copy all or part of a value X into registers starting at REGNO.
1514   The number of registers to be filled is NREGS.  */
1515
1516void
1517move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1518{
1519  int i;
1520#ifdef HAVE_load_multiple
1521  rtx pat;
1522  rtx_insn *last;
1523#endif
1524
1525  if (nregs == 0)
1526    return;
1527
1528  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1529    x = validize_mem (force_const_mem (mode, x));
1530
1531  /* See if the machine can do this with a load multiple insn.  */
1532#ifdef HAVE_load_multiple
1533  if (HAVE_load_multiple)
1534    {
1535      last = get_last_insn ();
1536      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1537			       GEN_INT (nregs));
1538      if (pat)
1539	{
1540	  emit_insn (pat);
1541	  return;
1542	}
1543      else
1544	delete_insns_since (last);
1545    }
1546#endif
1547
1548  for (i = 0; i < nregs; i++)
1549    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1550		    operand_subword_force (x, i, mode));
1551}
1552
1553/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1554   The number of registers to be filled is NREGS.  */
1555
1556void
1557move_block_from_reg (int regno, rtx x, int nregs)
1558{
1559  int i;
1560
1561  if (nregs == 0)
1562    return;
1563
1564  /* See if the machine can do this with a store multiple insn.  */
1565#ifdef HAVE_store_multiple
1566  if (HAVE_store_multiple)
1567    {
1568      rtx_insn *last = get_last_insn ();
1569      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1570				    GEN_INT (nregs));
1571      if (pat)
1572	{
1573	  emit_insn (pat);
1574	  return;
1575	}
1576      else
1577	delete_insns_since (last);
1578    }
1579#endif
1580
1581  for (i = 0; i < nregs; i++)
1582    {
1583      rtx tem = operand_subword (x, i, 1, BLKmode);
1584
1585      gcc_assert (tem);
1586
1587      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1588    }
1589}
1590
1591/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1592   ORIG, where ORIG is a non-consecutive group of registers represented by
1593   a PARALLEL.  The clone is identical to the original except in that the
1594   original set of registers is replaced by a new set of pseudo registers.
1595   The new set has the same modes as the original set.  */
1596
1597rtx
1598gen_group_rtx (rtx orig)
1599{
1600  int i, length;
1601  rtx *tmps;
1602
1603  gcc_assert (GET_CODE (orig) == PARALLEL);
1604
1605  length = XVECLEN (orig, 0);
1606  tmps = XALLOCAVEC (rtx, length);
1607
1608  /* Skip a NULL entry in first slot.  */
1609  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1610
1611  if (i)
1612    tmps[0] = 0;
1613
1614  for (; i < length; i++)
1615    {
1616      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1617      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1618
1619      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1620    }
1621
1622  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1623}
1624
1625/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1626   except that values are placed in TMPS[i], and must later be moved
1627   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1628
1629static void
1630emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1631{
1632  rtx src;
1633  int start, i;
1634  machine_mode m = GET_MODE (orig_src);
1635
1636  gcc_assert (GET_CODE (dst) == PARALLEL);
1637
1638  if (m != VOIDmode
1639      && !SCALAR_INT_MODE_P (m)
1640      && !MEM_P (orig_src)
1641      && GET_CODE (orig_src) != CONCAT)
1642    {
1643      machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1644      if (imode == BLKmode)
1645	src = assign_stack_temp (GET_MODE (orig_src), ssize);
1646      else
1647	src = gen_reg_rtx (imode);
1648      if (imode != BLKmode)
1649	src = gen_lowpart (GET_MODE (orig_src), src);
1650      emit_move_insn (src, orig_src);
1651      /* ...and back again.  */
1652      if (imode != BLKmode)
1653	src = gen_lowpart (imode, src);
1654      emit_group_load_1 (tmps, dst, src, type, ssize);
1655      return;
1656    }
1657
1658  /* Check for a NULL entry, used to indicate that the parameter goes
1659     both on the stack and in registers.  */
1660  if (XEXP (XVECEXP (dst, 0, 0), 0))
1661    start = 0;
1662  else
1663    start = 1;
1664
1665  /* Process the pieces.  */
1666  for (i = start; i < XVECLEN (dst, 0); i++)
1667    {
1668      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1669      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1670      unsigned int bytelen = GET_MODE_SIZE (mode);
1671      int shift = 0;
1672
1673      /* Handle trailing fragments that run over the size of the struct.  */
1674      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1675	{
1676	  /* Arrange to shift the fragment to where it belongs.
1677	     extract_bit_field loads to the lsb of the reg.  */
1678	  if (
1679#ifdef BLOCK_REG_PADDING
1680	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1681	      == (BYTES_BIG_ENDIAN ? upward : downward)
1682#else
1683	      BYTES_BIG_ENDIAN
1684#endif
1685	      )
1686	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1687	  bytelen = ssize - bytepos;
1688	  gcc_assert (bytelen > 0);
1689	}
1690
1691      /* If we won't be loading directly from memory, protect the real source
1692	 from strange tricks we might play; but make sure that the source can
1693	 be loaded directly into the destination.  */
1694      src = orig_src;
1695      if (!MEM_P (orig_src)
1696	  && (!CONSTANT_P (orig_src)
1697	      || (GET_MODE (orig_src) != mode
1698		  && GET_MODE (orig_src) != VOIDmode)))
1699	{
1700	  if (GET_MODE (orig_src) == VOIDmode)
1701	    src = gen_reg_rtx (mode);
1702	  else
1703	    src = gen_reg_rtx (GET_MODE (orig_src));
1704
1705	  emit_move_insn (src, orig_src);
1706	}
1707
1708      /* Optimize the access just a bit.  */
1709      if (MEM_P (src)
1710	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1711	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1712	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1713	  && bytelen == GET_MODE_SIZE (mode))
1714	{
1715	  tmps[i] = gen_reg_rtx (mode);
1716	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1717	}
1718      else if (COMPLEX_MODE_P (mode)
1719	       && GET_MODE (src) == mode
1720	       && bytelen == GET_MODE_SIZE (mode))
1721	/* Let emit_move_complex do the bulk of the work.  */
1722	tmps[i] = src;
1723      else if (GET_CODE (src) == CONCAT)
1724	{
1725	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1726	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1727
1728	  if ((bytepos == 0 && bytelen == slen0)
1729	      || (bytepos != 0 && bytepos + bytelen <= slen))
1730	    {
1731	      /* The following assumes that the concatenated objects all
1732		 have the same size.  In this case, a simple calculation
1733		 can be used to determine the object and the bit field
1734		 to be extracted.  */
1735	      tmps[i] = XEXP (src, bytepos / slen0);
1736	      if (! CONSTANT_P (tmps[i])
1737		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1738		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1739					     (bytepos % slen0) * BITS_PER_UNIT,
1740					     1, NULL_RTX, mode, mode);
1741	    }
1742	  else
1743	    {
1744	      rtx mem;
1745
1746	      gcc_assert (!bytepos);
1747	      mem = assign_stack_temp (GET_MODE (src), slen);
1748	      emit_move_insn (mem, src);
1749	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1750					   0, 1, NULL_RTX, mode, mode);
1751	    }
1752	}
1753      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1754	 SIMD register, which is currently broken.  While we get GCC
1755	 to emit proper RTL for these cases, let's dump to memory.  */
1756      else if (VECTOR_MODE_P (GET_MODE (dst))
1757	       && REG_P (src))
1758	{
1759	  int slen = GET_MODE_SIZE (GET_MODE (src));
1760	  rtx mem;
1761
1762	  mem = assign_stack_temp (GET_MODE (src), slen);
1763	  emit_move_insn (mem, src);
1764	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
1765	}
1766      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1767               && XVECLEN (dst, 0) > 1)
1768        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1769      else if (CONSTANT_P (src))
1770	{
1771	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1772
1773	  if (len == ssize)
1774	    tmps[i] = src;
1775	  else
1776	    {
1777	      rtx first, second;
1778
1779	      /* TODO: const_wide_int can have sizes other than this...  */
1780	      gcc_assert (2 * len == ssize);
1781	      split_double (src, &first, &second);
1782	      if (i)
1783		tmps[i] = second;
1784	      else
1785		tmps[i] = first;
1786	    }
1787	}
1788      else if (REG_P (src) && GET_MODE (src) == mode)
1789	tmps[i] = src;
1790      else
1791	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1792				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1793				     mode, mode);
1794
1795      if (shift)
1796	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1797				shift, tmps[i], 0);
1798    }
1799}
1800
1801/* Emit code to move a block SRC of type TYPE to a block DST,
1802   where DST is non-consecutive registers represented by a PARALLEL.
1803   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1804   if not known.  */
1805
1806void
1807emit_group_load (rtx dst, rtx src, tree type, int ssize)
1808{
1809  rtx *tmps;
1810  int i;
1811
1812  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1813  emit_group_load_1 (tmps, dst, src, type, ssize);
1814
1815  /* Copy the extracted pieces into the proper (probable) hard regs.  */
1816  for (i = 0; i < XVECLEN (dst, 0); i++)
1817    {
1818      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1819      if (d == NULL)
1820	continue;
1821      emit_move_insn (d, tmps[i]);
1822    }
1823}
1824
1825/* Similar, but load SRC into new pseudos in a format that looks like
1826   PARALLEL.  This can later be fed to emit_group_move to get things
1827   in the right place.  */
1828
1829rtx
1830emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1831{
1832  rtvec vec;
1833  int i;
1834
1835  vec = rtvec_alloc (XVECLEN (parallel, 0));
1836  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1837
1838  /* Convert the vector to look just like the original PARALLEL, except
1839     with the computed values.  */
1840  for (i = 0; i < XVECLEN (parallel, 0); i++)
1841    {
1842      rtx e = XVECEXP (parallel, 0, i);
1843      rtx d = XEXP (e, 0);
1844
1845      if (d)
1846	{
1847	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1848	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1849	}
1850      RTVEC_ELT (vec, i) = e;
1851    }
1852
1853  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1854}
1855
1856/* Emit code to move a block SRC to block DST, where SRC and DST are
1857   non-consecutive groups of registers, each represented by a PARALLEL.  */
1858
1859void
1860emit_group_move (rtx dst, rtx src)
1861{
1862  int i;
1863
1864  gcc_assert (GET_CODE (src) == PARALLEL
1865	      && GET_CODE (dst) == PARALLEL
1866	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
1867
1868  /* Skip first entry if NULL.  */
1869  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1870    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1871		    XEXP (XVECEXP (src, 0, i), 0));
1872}
1873
1874/* Move a group of registers represented by a PARALLEL into pseudos.  */
1875
1876rtx
1877emit_group_move_into_temps (rtx src)
1878{
1879  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1880  int i;
1881
1882  for (i = 0; i < XVECLEN (src, 0); i++)
1883    {
1884      rtx e = XVECEXP (src, 0, i);
1885      rtx d = XEXP (e, 0);
1886
1887      if (d)
1888	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1889      RTVEC_ELT (vec, i) = e;
1890    }
1891
1892  return gen_rtx_PARALLEL (GET_MODE (src), vec);
1893}
1894
1895/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1896   where SRC is non-consecutive registers represented by a PARALLEL.
1897   SSIZE represents the total size of block ORIG_DST, or -1 if not
1898   known.  */
1899
1900void
1901emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1902{
1903  rtx *tmps, dst;
1904  int start, finish, i;
1905  machine_mode m = GET_MODE (orig_dst);
1906
1907  gcc_assert (GET_CODE (src) == PARALLEL);
1908
1909  if (!SCALAR_INT_MODE_P (m)
1910      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1911    {
1912      machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1913      if (imode == BLKmode)
1914        dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1915      else
1916        dst = gen_reg_rtx (imode);
1917      emit_group_store (dst, src, type, ssize);
1918      if (imode != BLKmode)
1919        dst = gen_lowpart (GET_MODE (orig_dst), dst);
1920      emit_move_insn (orig_dst, dst);
1921      return;
1922    }
1923
1924  /* Check for a NULL entry, used to indicate that the parameter goes
1925     both on the stack and in registers.  */
1926  if (XEXP (XVECEXP (src, 0, 0), 0))
1927    start = 0;
1928  else
1929    start = 1;
1930  finish = XVECLEN (src, 0);
1931
1932  tmps = XALLOCAVEC (rtx, finish);
1933
1934  /* Copy the (probable) hard regs into pseudos.  */
1935  for (i = start; i < finish; i++)
1936    {
1937      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1938      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1939	{
1940	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
1941	  emit_move_insn (tmps[i], reg);
1942	}
1943      else
1944	tmps[i] = reg;
1945    }
1946
1947  /* If we won't be storing directly into memory, protect the real destination
1948     from strange tricks we might play.  */
1949  dst = orig_dst;
1950  if (GET_CODE (dst) == PARALLEL)
1951    {
1952      rtx temp;
1953
1954      /* We can get a PARALLEL dst if there is a conditional expression in
1955	 a return statement.  In that case, the dst and src are the same,
1956	 so no action is necessary.  */
1957      if (rtx_equal_p (dst, src))
1958	return;
1959
1960      /* It is unclear if we can ever reach here, but we may as well handle
1961	 it.  Allocate a temporary, and split this into a store/load to/from
1962	 the temporary.  */
1963      temp = assign_stack_temp (GET_MODE (dst), ssize);
1964      emit_group_store (temp, src, type, ssize);
1965      emit_group_load (dst, temp, type, ssize);
1966      return;
1967    }
1968  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1969    {
1970      machine_mode outer = GET_MODE (dst);
1971      machine_mode inner;
1972      HOST_WIDE_INT bytepos;
1973      bool done = false;
1974      rtx temp;
1975
1976      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1977	dst = gen_reg_rtx (outer);
1978
1979      /* Make life a bit easier for combine.  */
1980      /* If the first element of the vector is the low part
1981	 of the destination mode, use a paradoxical subreg to
1982	 initialize the destination.  */
1983      if (start < finish)
1984	{
1985	  inner = GET_MODE (tmps[start]);
1986	  bytepos = subreg_lowpart_offset (inner, outer);
1987	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1988	    {
1989	      temp = simplify_gen_subreg (outer, tmps[start],
1990					  inner, 0);
1991	      if (temp)
1992		{
1993		  emit_move_insn (dst, temp);
1994		  done = true;
1995		  start++;
1996		}
1997	    }
1998	}
1999
2000      /* If the first element wasn't the low part, try the last.  */
2001      if (!done
2002	  && start < finish - 1)
2003	{
2004	  inner = GET_MODE (tmps[finish - 1]);
2005	  bytepos = subreg_lowpart_offset (inner, outer);
2006	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2007	    {
2008	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
2009					  inner, 0);
2010	      if (temp)
2011		{
2012		  emit_move_insn (dst, temp);
2013		  done = true;
2014		  finish--;
2015		}
2016	    }
2017	}
2018
2019      /* Otherwise, simply initialize the result to zero.  */
2020      if (!done)
2021        emit_move_insn (dst, CONST0_RTX (outer));
2022    }
2023
2024  /* Process the pieces.  */
2025  for (i = start; i < finish; i++)
2026    {
2027      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2028      machine_mode mode = GET_MODE (tmps[i]);
2029      unsigned int bytelen = GET_MODE_SIZE (mode);
2030      unsigned int adj_bytelen;
2031      rtx dest = dst;
2032
2033      /* Handle trailing fragments that run over the size of the struct.  */
2034      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2035	adj_bytelen = ssize - bytepos;
2036      else
2037	adj_bytelen = bytelen;
2038
2039      if (GET_CODE (dst) == CONCAT)
2040	{
2041	  if (bytepos + adj_bytelen
2042	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2043	    dest = XEXP (dst, 0);
2044	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2045	    {
2046	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2047	      dest = XEXP (dst, 1);
2048	    }
2049	  else
2050	    {
2051	      machine_mode dest_mode = GET_MODE (dest);
2052	      machine_mode tmp_mode = GET_MODE (tmps[i]);
2053
2054	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2055
2056	      if (GET_MODE_ALIGNMENT (dest_mode)
2057		  >= GET_MODE_ALIGNMENT (tmp_mode))
2058		{
2059		  dest = assign_stack_temp (dest_mode,
2060					    GET_MODE_SIZE (dest_mode));
2061		  emit_move_insn (adjust_address (dest,
2062						  tmp_mode,
2063						  bytepos),
2064				  tmps[i]);
2065		  dst = dest;
2066		}
2067	      else
2068		{
2069		  dest = assign_stack_temp (tmp_mode,
2070					    GET_MODE_SIZE (tmp_mode));
2071		  emit_move_insn (dest, tmps[i]);
2072		  dst = adjust_address (dest, dest_mode, bytepos);
2073		}
2074	      break;
2075	    }
2076	}
2077
2078      /* Handle trailing fragments that run over the size of the struct.  */
2079      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2080	{
2081	  /* store_bit_field always takes its value from the lsb.
2082	     Move the fragment to the lsb if it's not already there.  */
2083	  if (
2084#ifdef BLOCK_REG_PADDING
2085	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2086	      == (BYTES_BIG_ENDIAN ? upward : downward)
2087#else
2088	      BYTES_BIG_ENDIAN
2089#endif
2090	      )
2091	    {
2092	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2093	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2094				      shift, tmps[i], 0);
2095	    }
2096
2097	  /* Make sure not to write past the end of the struct.  */
2098	  store_bit_field (dest,
2099			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2100			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2101			   VOIDmode, tmps[i]);
2102	}
2103
2104      /* Optimize the access just a bit.  */
2105      else if (MEM_P (dest)
2106	       && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2107		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2108	       && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2109	       && bytelen == GET_MODE_SIZE (mode))
2110	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2111
2112      else
2113	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2114			 0, 0, mode, tmps[i]);
2115    }
2116
2117  /* Copy from the pseudo into the (probable) hard reg.  */
2118  if (orig_dst != dst)
2119    emit_move_insn (orig_dst, dst);
2120}
2121
2122/* Return a form of X that does not use a PARALLEL.  TYPE is the type
2123   of the value stored in X.  */
2124
2125rtx
2126maybe_emit_group_store (rtx x, tree type)
2127{
2128  machine_mode mode = TYPE_MODE (type);
2129  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2130  if (GET_CODE (x) == PARALLEL)
2131    {
2132      rtx result = gen_reg_rtx (mode);
2133      emit_group_store (result, x, type, int_size_in_bytes (type));
2134      return result;
2135    }
2136  return x;
2137}
2138
2139/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2140
2141   This is used on targets that return BLKmode values in registers.  */
2142
2143void
2144copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2145{
2146  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2147  rtx src = NULL, dst = NULL;
2148  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2149  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2150  machine_mode mode = GET_MODE (srcreg);
2151  machine_mode tmode = GET_MODE (target);
2152  machine_mode copy_mode;
2153
2154  /* BLKmode registers created in the back-end shouldn't have survived.  */
2155  gcc_assert (mode != BLKmode);
2156
2157  /* If the structure doesn't take up a whole number of words, see whether
2158     SRCREG is padded on the left or on the right.  If it's on the left,
2159     set PADDING_CORRECTION to the number of bits to skip.
2160
2161     In most ABIs, the structure will be returned at the least end of
2162     the register, which translates to right padding on little-endian
2163     targets and left padding on big-endian targets.  The opposite
2164     holds if the structure is returned at the most significant
2165     end of the register.  */
2166  if (bytes % UNITS_PER_WORD != 0
2167      && (targetm.calls.return_in_msb (type)
2168	  ? !BYTES_BIG_ENDIAN
2169	  : BYTES_BIG_ENDIAN))
2170    padding_correction
2171      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2172
2173  /* We can use a single move if we have an exact mode for the size.  */
2174  else if (MEM_P (target)
2175	   && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2176	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2177	   && bytes == GET_MODE_SIZE (mode))
2178  {
2179    emit_move_insn (adjust_address (target, mode, 0), srcreg);
2180    return;
2181  }
2182
2183  /* And if we additionally have the same mode for a register.  */
2184  else if (REG_P (target)
2185	   && GET_MODE (target) == mode
2186	   && bytes == GET_MODE_SIZE (mode))
2187  {
2188    emit_move_insn (target, srcreg);
2189    return;
2190  }
2191
2192  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2193     into a new pseudo which is a full word.  */
2194  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2195    {
2196      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2197      mode = word_mode;
2198    }
2199
2200  /* Copy the structure BITSIZE bits at a time.  If the target lives in
2201     memory, take care of not reading/writing past its end by selecting
2202     a copy mode suited to BITSIZE.  This should always be possible given
2203     how it is computed.
2204
2205     If the target lives in register, make sure not to select a copy mode
2206     larger than the mode of the register.
2207
2208     We could probably emit more efficient code for machines which do not use
2209     strict alignment, but it doesn't seem worth the effort at the current
2210     time.  */
2211
2212  copy_mode = word_mode;
2213  if (MEM_P (target))
2214    {
2215      machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2216      if (mem_mode != BLKmode)
2217	copy_mode = mem_mode;
2218    }
2219  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2220    copy_mode = tmode;
2221
2222  for (bitpos = 0, xbitpos = padding_correction;
2223       bitpos < bytes * BITS_PER_UNIT;
2224       bitpos += bitsize, xbitpos += bitsize)
2225    {
2226      /* We need a new source operand each time xbitpos is on a
2227	 word boundary and when xbitpos == padding_correction
2228	 (the first time through).  */
2229      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2230	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2231
2232      /* We need a new destination operand each time bitpos is on
2233	 a word boundary.  */
2234      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2235	dst = target;
2236      else if (bitpos % BITS_PER_WORD == 0)
2237	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2238
2239      /* Use xbitpos for the source extraction (right justified) and
2240	 bitpos for the destination store (left justified).  */
2241      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2242		       extract_bit_field (src, bitsize,
2243					  xbitpos % BITS_PER_WORD, 1,
2244					  NULL_RTX, copy_mode, copy_mode));
2245    }
2246}
2247
2248/* Copy BLKmode value SRC into a register of mode MODE.  Return the
2249   register if it contains any data, otherwise return null.
2250
2251   This is used on targets that return BLKmode values in registers.  */
2252
2253rtx
2254copy_blkmode_to_reg (machine_mode mode, tree src)
2255{
2256  int i, n_regs;
2257  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2258  unsigned int bitsize;
2259  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2260  machine_mode dst_mode;
2261
2262  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2263
2264  x = expand_normal (src);
2265
2266  bytes = int_size_in_bytes (TREE_TYPE (src));
2267  if (bytes == 0)
2268    return NULL_RTX;
2269
2270  /* If the structure doesn't take up a whole number of words, see
2271     whether the register value should be padded on the left or on
2272     the right.  Set PADDING_CORRECTION to the number of padding
2273     bits needed on the left side.
2274
2275     In most ABIs, the structure will be returned at the least end of
2276     the register, which translates to right padding on little-endian
2277     targets and left padding on big-endian targets.  The opposite
2278     holds if the structure is returned at the most significant
2279     end of the register.  */
2280  if (bytes % UNITS_PER_WORD != 0
2281      && (targetm.calls.return_in_msb (TREE_TYPE (src))
2282	  ? !BYTES_BIG_ENDIAN
2283	  : BYTES_BIG_ENDIAN))
2284    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2285					   * BITS_PER_UNIT));
2286
2287  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2288  dst_words = XALLOCAVEC (rtx, n_regs);
2289  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2290
2291  /* Copy the structure BITSIZE bits at a time.  */
2292  for (bitpos = 0, xbitpos = padding_correction;
2293       bitpos < bytes * BITS_PER_UNIT;
2294       bitpos += bitsize, xbitpos += bitsize)
2295    {
2296      /* We need a new destination pseudo each time xbitpos is
2297	 on a word boundary and when xbitpos == padding_correction
2298	 (the first time through).  */
2299      if (xbitpos % BITS_PER_WORD == 0
2300	  || xbitpos == padding_correction)
2301	{
2302	  /* Generate an appropriate register.  */
2303	  dst_word = gen_reg_rtx (word_mode);
2304	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2305
2306	  /* Clear the destination before we move anything into it.  */
2307	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
2308	}
2309
2310      /* We need a new source operand each time bitpos is on a word
2311	 boundary.  */
2312      if (bitpos % BITS_PER_WORD == 0)
2313	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2314
2315      /* Use bitpos for the source extraction (left justified) and
2316	 xbitpos for the destination store (right justified).  */
2317      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2318		       0, 0, word_mode,
2319		       extract_bit_field (src_word, bitsize,
2320					  bitpos % BITS_PER_WORD, 1,
2321					  NULL_RTX, word_mode, word_mode));
2322    }
2323
2324  if (mode == BLKmode)
2325    {
2326      /* Find the smallest integer mode large enough to hold the
2327	 entire structure.  */
2328      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2329	   mode != VOIDmode;
2330	   mode = GET_MODE_WIDER_MODE (mode))
2331	/* Have we found a large enough mode?  */
2332	if (GET_MODE_SIZE (mode) >= bytes)
2333	  break;
2334
2335      /* A suitable mode should have been found.  */
2336      gcc_assert (mode != VOIDmode);
2337    }
2338
2339  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2340    dst_mode = word_mode;
2341  else
2342    dst_mode = mode;
2343  dst = gen_reg_rtx (dst_mode);
2344
2345  for (i = 0; i < n_regs; i++)
2346    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2347
2348  if (mode != dst_mode)
2349    dst = gen_lowpart (mode, dst);
2350
2351  return dst;
2352}
2353
2354/* Add a USE expression for REG to the (possibly empty) list pointed
2355   to by CALL_FUSAGE.  REG must denote a hard register.  */
2356
2357void
2358use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2359{
2360  gcc_assert (REG_P (reg));
2361
2362  if (!HARD_REGISTER_P (reg))
2363    return;
2364
2365  *call_fusage
2366    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2367}
2368
2369/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2370   to by CALL_FUSAGE.  REG must denote a hard register.  */
2371
2372void
2373clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2374{
2375  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2376
2377  *call_fusage
2378    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2379}
2380
2381/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2382   starting at REGNO.  All of these registers must be hard registers.  */
2383
2384void
2385use_regs (rtx *call_fusage, int regno, int nregs)
2386{
2387  int i;
2388
2389  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2390
2391  for (i = 0; i < nregs; i++)
2392    use_reg (call_fusage, regno_reg_rtx[regno + i]);
2393}
2394
2395/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2396   PARALLEL REGS.  This is for calls that pass values in multiple
2397   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2398
2399void
2400use_group_regs (rtx *call_fusage, rtx regs)
2401{
2402  int i;
2403
2404  for (i = 0; i < XVECLEN (regs, 0); i++)
2405    {
2406      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2407
2408      /* A NULL entry means the parameter goes both on the stack and in
2409	 registers.  This can also be a MEM for targets that pass values
2410	 partially on the stack and partially in registers.  */
2411      if (reg != 0 && REG_P (reg))
2412	use_reg (call_fusage, reg);
2413    }
2414}
2415
2416/* Return the defining gimple statement for SSA_NAME NAME if it is an
2417   assigment and the code of the expresion on the RHS is CODE.  Return
2418   NULL otherwise.  */
2419
2420static gimple
2421get_def_for_expr (tree name, enum tree_code code)
2422{
2423  gimple def_stmt;
2424
2425  if (TREE_CODE (name) != SSA_NAME)
2426    return NULL;
2427
2428  def_stmt = get_gimple_for_ssa_name (name);
2429  if (!def_stmt
2430      || gimple_assign_rhs_code (def_stmt) != code)
2431    return NULL;
2432
2433  return def_stmt;
2434}
2435
2436#ifdef HAVE_conditional_move
2437/* Return the defining gimple statement for SSA_NAME NAME if it is an
2438   assigment and the class of the expresion on the RHS is CLASS.  Return
2439   NULL otherwise.  */
2440
2441static gimple
2442get_def_for_expr_class (tree name, enum tree_code_class tclass)
2443{
2444  gimple def_stmt;
2445
2446  if (TREE_CODE (name) != SSA_NAME)
2447    return NULL;
2448
2449  def_stmt = get_gimple_for_ssa_name (name);
2450  if (!def_stmt
2451      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2452    return NULL;
2453
2454  return def_stmt;
2455}
2456#endif
2457
2458
2459/* Determine whether the LEN bytes generated by CONSTFUN can be
2460   stored to memory using several move instructions.  CONSTFUNDATA is
2461   a pointer which will be passed as argument in every CONSTFUN call.
2462   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2463   a memset operation and false if it's a copy of a constant string.
2464   Return nonzero if a call to store_by_pieces should succeed.  */
2465
2466int
2467can_store_by_pieces (unsigned HOST_WIDE_INT len,
2468		     rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2469		     void *constfundata, unsigned int align, bool memsetp)
2470{
2471  unsigned HOST_WIDE_INT l;
2472  unsigned int max_size;
2473  HOST_WIDE_INT offset = 0;
2474  machine_mode mode;
2475  enum insn_code icode;
2476  int reverse;
2477  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
2478  rtx cst ATTRIBUTE_UNUSED;
2479
2480  if (len == 0)
2481    return 1;
2482
2483  if (!targetm.use_by_pieces_infrastructure_p (len, align,
2484					       memsetp
2485						 ? SET_BY_PIECES
2486						 : STORE_BY_PIECES,
2487					       optimize_insn_for_speed_p ()))
2488    return 0;
2489
2490  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2491
2492  /* We would first store what we can in the largest integer mode, then go to
2493     successively smaller modes.  */
2494
2495  for (reverse = 0;
2496       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2497       reverse++)
2498    {
2499      l = len;
2500      max_size = STORE_MAX_PIECES + 1;
2501      while (max_size > 1 && l > 0)
2502	{
2503	  mode = widest_int_mode_for_size (max_size);
2504
2505	  if (mode == VOIDmode)
2506	    break;
2507
2508	  icode = optab_handler (mov_optab, mode);
2509	  if (icode != CODE_FOR_nothing
2510	      && align >= GET_MODE_ALIGNMENT (mode))
2511	    {
2512	      unsigned int size = GET_MODE_SIZE (mode);
2513
2514	      while (l >= size)
2515		{
2516		  if (reverse)
2517		    offset -= size;
2518
2519		  cst = (*constfun) (constfundata, offset, mode);
2520		  if (!targetm.legitimate_constant_p (mode, cst))
2521		    return 0;
2522
2523		  if (!reverse)
2524		    offset += size;
2525
2526		  l -= size;
2527		}
2528	    }
2529
2530	  max_size = GET_MODE_SIZE (mode);
2531	}
2532
2533      /* The code above should have handled everything.  */
2534      gcc_assert (!l);
2535    }
2536
2537  return 1;
2538}
2539
2540/* Generate several move instructions to store LEN bytes generated by
2541   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2542   pointer which will be passed as argument in every CONSTFUN call.
2543   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2544   a memset operation and false if it's a copy of a constant string.
2545   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2546   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2547   stpcpy.  */
2548
2549rtx
2550store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2551		 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2552		 void *constfundata, unsigned int align, bool memsetp, int endp)
2553{
2554  machine_mode to_addr_mode = get_address_mode (to);
2555  struct store_by_pieces_d data;
2556
2557  if (len == 0)
2558    {
2559      gcc_assert (endp != 2);
2560      return to;
2561    }
2562
2563  gcc_assert (targetm.use_by_pieces_infrastructure_p
2564		(len, align,
2565		 memsetp
2566		   ? SET_BY_PIECES
2567		   : STORE_BY_PIECES,
2568		 optimize_insn_for_speed_p ()));
2569
2570  data.constfun = constfun;
2571  data.constfundata = constfundata;
2572  data.len = len;
2573  data.to = to;
2574  store_by_pieces_1 (&data, align);
2575  if (endp)
2576    {
2577      rtx to1;
2578
2579      gcc_assert (!data.reverse);
2580      if (data.autinc_to)
2581	{
2582	  if (endp == 2)
2583	    {
2584	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2585		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2586	      else
2587		data.to_addr = copy_to_mode_reg (to_addr_mode,
2588						 plus_constant (to_addr_mode,
2589								data.to_addr,
2590								-1));
2591	    }
2592	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2593					   data.offset);
2594	}
2595      else
2596	{
2597	  if (endp == 2)
2598	    --data.offset;
2599	  to1 = adjust_address (data.to, QImode, data.offset);
2600	}
2601      return to1;
2602    }
2603  else
2604    return data.to;
2605}
2606
2607/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2608   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2609
2610static void
2611clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2612{
2613  struct store_by_pieces_d data;
2614
2615  if (len == 0)
2616    return;
2617
2618  data.constfun = clear_by_pieces_1;
2619  data.constfundata = NULL;
2620  data.len = len;
2621  data.to = to;
2622  store_by_pieces_1 (&data, align);
2623}
2624
2625/* Callback routine for clear_by_pieces.
2626   Return const0_rtx unconditionally.  */
2627
2628static rtx
2629clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2630		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2631		   machine_mode mode ATTRIBUTE_UNUSED)
2632{
2633  return const0_rtx;
2634}
2635
2636/* Subroutine of clear_by_pieces and store_by_pieces.
2637   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2638   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2639
2640static void
2641store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2642		   unsigned int align ATTRIBUTE_UNUSED)
2643{
2644  machine_mode to_addr_mode = get_address_mode (data->to);
2645  rtx to_addr = XEXP (data->to, 0);
2646  unsigned int max_size = STORE_MAX_PIECES + 1;
2647  enum insn_code icode;
2648
2649  data->offset = 0;
2650  data->to_addr = to_addr;
2651  data->autinc_to
2652    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2653       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2654
2655  data->explicit_inc_to = 0;
2656  data->reverse
2657    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2658  if (data->reverse)
2659    data->offset = data->len;
2660
2661  /* If storing requires more than two move insns,
2662     copy addresses to registers (to make displacements shorter)
2663     and use post-increment if available.  */
2664  if (!data->autinc_to
2665      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2666    {
2667      /* Determine the main mode we'll be using.
2668	 MODE might not be used depending on the definitions of the
2669	 USE_* macros below.  */
2670      machine_mode mode ATTRIBUTE_UNUSED
2671	= widest_int_mode_for_size (max_size);
2672
2673      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2674	{
2675	  data->to_addr = copy_to_mode_reg (to_addr_mode,
2676					    plus_constant (to_addr_mode,
2677							   to_addr,
2678							   data->len));
2679	  data->autinc_to = 1;
2680	  data->explicit_inc_to = -1;
2681	}
2682
2683      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2684	  && ! data->autinc_to)
2685	{
2686	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2687	  data->autinc_to = 1;
2688	  data->explicit_inc_to = 1;
2689	}
2690
2691      if ( !data->autinc_to && CONSTANT_P (to_addr))
2692	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2693    }
2694
2695  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2696
2697  /* First store what we can in the largest integer mode, then go to
2698     successively smaller modes.  */
2699
2700  while (max_size > 1 && data->len > 0)
2701    {
2702      machine_mode mode = widest_int_mode_for_size (max_size);
2703
2704      if (mode == VOIDmode)
2705	break;
2706
2707      icode = optab_handler (mov_optab, mode);
2708      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2709	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2710
2711      max_size = GET_MODE_SIZE (mode);
2712    }
2713
2714  /* The code above should have handled everything.  */
2715  gcc_assert (!data->len);
2716}
2717
2718/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2719   with move instructions for mode MODE.  GENFUN is the gen_... function
2720   to make a move insn for that mode.  DATA has all the other info.  */
2721
2722static void
2723store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2724		   struct store_by_pieces_d *data)
2725{
2726  unsigned int size = GET_MODE_SIZE (mode);
2727  rtx to1, cst;
2728
2729  while (data->len >= size)
2730    {
2731      if (data->reverse)
2732	data->offset -= size;
2733
2734      if (data->autinc_to)
2735	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2736					 data->offset);
2737      else
2738	to1 = adjust_address (data->to, mode, data->offset);
2739
2740      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2741	emit_insn (gen_add2_insn (data->to_addr,
2742				  gen_int_mode (-(HOST_WIDE_INT) size,
2743						GET_MODE (data->to_addr))));
2744
2745      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2746      emit_insn ((*genfun) (to1, cst));
2747
2748      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2749	emit_insn (gen_add2_insn (data->to_addr,
2750				  gen_int_mode (size,
2751						GET_MODE (data->to_addr))));
2752
2753      if (! data->reverse)
2754	data->offset += size;
2755
2756      data->len -= size;
2757    }
2758}
2759
2760/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2761   its length in bytes.  */
2762
2763rtx
2764clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2765		     unsigned int expected_align, HOST_WIDE_INT expected_size,
2766		     unsigned HOST_WIDE_INT min_size,
2767		     unsigned HOST_WIDE_INT max_size,
2768		     unsigned HOST_WIDE_INT probable_max_size)
2769{
2770  machine_mode mode = GET_MODE (object);
2771  unsigned int align;
2772
2773  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2774
2775  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2776     just move a zero.  Otherwise, do this a piece at a time.  */
2777  if (mode != BLKmode
2778      && CONST_INT_P (size)
2779      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2780    {
2781      rtx zero = CONST0_RTX (mode);
2782      if (zero != NULL)
2783	{
2784	  emit_move_insn (object, zero);
2785	  return NULL;
2786	}
2787
2788      if (COMPLEX_MODE_P (mode))
2789	{
2790	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2791	  if (zero != NULL)
2792	    {
2793	      write_complex_part (object, zero, 0);
2794	      write_complex_part (object, zero, 1);
2795	      return NULL;
2796	    }
2797	}
2798    }
2799
2800  if (size == const0_rtx)
2801    return NULL;
2802
2803  align = MEM_ALIGN (object);
2804
2805  if (CONST_INT_P (size)
2806      && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2807						 CLEAR_BY_PIECES,
2808						 optimize_insn_for_speed_p ()))
2809    clear_by_pieces (object, INTVAL (size), align);
2810  else if (set_storage_via_setmem (object, size, const0_rtx, align,
2811				   expected_align, expected_size,
2812				   min_size, max_size, probable_max_size))
2813    ;
2814  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2815    return set_storage_via_libcall (object, size, const0_rtx,
2816				    method == BLOCK_OP_TAILCALL);
2817  else
2818    gcc_unreachable ();
2819
2820  return NULL;
2821}
2822
2823rtx
2824clear_storage (rtx object, rtx size, enum block_op_methods method)
2825{
2826  unsigned HOST_WIDE_INT max, min = 0;
2827  if (GET_CODE (size) == CONST_INT)
2828    min = max = UINTVAL (size);
2829  else
2830    max = GET_MODE_MASK (GET_MODE (size));
2831  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2832}
2833
2834
2835/* A subroutine of clear_storage.  Expand a call to memset.
2836   Return the return value of memset, 0 otherwise.  */
2837
2838rtx
2839set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2840{
2841  tree call_expr, fn, object_tree, size_tree, val_tree;
2842  machine_mode size_mode;
2843  rtx retval;
2844
2845  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2846     place those into new pseudos into a VAR_DECL and use them later.  */
2847
2848  object = copy_addr_to_reg (XEXP (object, 0));
2849
2850  size_mode = TYPE_MODE (sizetype);
2851  size = convert_to_mode (size_mode, size, 1);
2852  size = copy_to_mode_reg (size_mode, size);
2853
2854  /* It is incorrect to use the libcall calling conventions to call
2855     memset in this context.  This could be a user call to memset and
2856     the user may wish to examine the return value from memset.  For
2857     targets where libcalls and normal calls have different conventions
2858     for returning pointers, we could end up generating incorrect code.  */
2859
2860  object_tree = make_tree (ptr_type_node, object);
2861  if (!CONST_INT_P (val))
2862    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2863  size_tree = make_tree (sizetype, size);
2864  val_tree = make_tree (integer_type_node, val);
2865
2866  fn = clear_storage_libcall_fn (true);
2867  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2868  CALL_EXPR_TAILCALL (call_expr) = tailcall;
2869
2870  retval = expand_normal (call_expr);
2871
2872  return retval;
2873}
2874
2875/* A subroutine of set_storage_via_libcall.  Create the tree node
2876   for the function we use for block clears.  */
2877
2878tree block_clear_fn;
2879
2880void
2881init_block_clear_fn (const char *asmspec)
2882{
2883  if (!block_clear_fn)
2884    {
2885      tree fn, args;
2886
2887      fn = get_identifier ("memset");
2888      args = build_function_type_list (ptr_type_node, ptr_type_node,
2889				       integer_type_node, sizetype,
2890				       NULL_TREE);
2891
2892      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2893      DECL_EXTERNAL (fn) = 1;
2894      TREE_PUBLIC (fn) = 1;
2895      DECL_ARTIFICIAL (fn) = 1;
2896      TREE_NOTHROW (fn) = 1;
2897      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2898      DECL_VISIBILITY_SPECIFIED (fn) = 1;
2899
2900      block_clear_fn = fn;
2901    }
2902
2903  if (asmspec)
2904    set_user_assembler_name (block_clear_fn, asmspec);
2905}
2906
2907static tree
2908clear_storage_libcall_fn (int for_call)
2909{
2910  static bool emitted_extern;
2911
2912  if (!block_clear_fn)
2913    init_block_clear_fn (NULL);
2914
2915  if (for_call && !emitted_extern)
2916    {
2917      emitted_extern = true;
2918      make_decl_rtl (block_clear_fn);
2919    }
2920
2921  return block_clear_fn;
2922}
2923
2924/* Expand a setmem pattern; return true if successful.  */
2925
2926bool
2927set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2928			unsigned int expected_align, HOST_WIDE_INT expected_size,
2929			unsigned HOST_WIDE_INT min_size,
2930			unsigned HOST_WIDE_INT max_size,
2931			unsigned HOST_WIDE_INT probable_max_size)
2932{
2933  /* Try the most limited insn first, because there's no point
2934     including more than one in the machine description unless
2935     the more limited one has some advantage.  */
2936
2937  machine_mode mode;
2938
2939  if (expected_align < align)
2940    expected_align = align;
2941  if (expected_size != -1)
2942    {
2943      if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2944	expected_size = max_size;
2945      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2946	expected_size = min_size;
2947    }
2948
2949  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2950       mode = GET_MODE_WIDER_MODE (mode))
2951    {
2952      enum insn_code code = direct_optab_handler (setmem_optab, mode);
2953
2954      if (code != CODE_FOR_nothing
2955	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2956	     here because if SIZE is less than the mode mask, as it is
2957	     returned by the macro, it will definitely be less than the
2958	     actual mode mask.  Since SIZE is within the Pmode address
2959	     space, we limit MODE to Pmode.  */
2960	  && ((CONST_INT_P (size)
2961	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2962		   <= (GET_MODE_MASK (mode) >> 1)))
2963	      || max_size <= (GET_MODE_MASK (mode) >> 1)
2964	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2965	{
2966	  struct expand_operand ops[9];
2967	  unsigned int nops;
2968
2969	  nops = insn_data[(int) code].n_generator_args;
2970	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2971
2972	  create_fixed_operand (&ops[0], object);
2973	  /* The check above guarantees that this size conversion is valid.  */
2974	  create_convert_operand_to (&ops[1], size, mode, true);
2975	  create_convert_operand_from (&ops[2], val, byte_mode, true);
2976	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2977	  if (nops >= 6)
2978	    {
2979	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2980	      create_integer_operand (&ops[5], expected_size);
2981	    }
2982	  if (nops >= 8)
2983	    {
2984	      create_integer_operand (&ops[6], min_size);
2985	      /* If we can not represent the maximal size,
2986		 make parameter NULL.  */
2987	      if ((HOST_WIDE_INT) max_size != -1)
2988	        create_integer_operand (&ops[7], max_size);
2989	      else
2990		create_fixed_operand (&ops[7], NULL);
2991	    }
2992	  if (nops == 9)
2993	    {
2994	      /* If we can not represent the maximal size,
2995		 make parameter NULL.  */
2996	      if ((HOST_WIDE_INT) probable_max_size != -1)
2997	        create_integer_operand (&ops[8], probable_max_size);
2998	      else
2999		create_fixed_operand (&ops[8], NULL);
3000	    }
3001	  if (maybe_expand_insn (code, nops, ops))
3002	    return true;
3003	}
3004    }
3005
3006  return false;
3007}
3008
3009
3010/* Write to one of the components of the complex value CPLX.  Write VAL to
3011   the real part if IMAG_P is false, and the imaginary part if its true.  */
3012
3013void
3014write_complex_part (rtx cplx, rtx val, bool imag_p)
3015{
3016  machine_mode cmode;
3017  machine_mode imode;
3018  unsigned ibitsize;
3019
3020  if (GET_CODE (cplx) == CONCAT)
3021    {
3022      emit_move_insn (XEXP (cplx, imag_p), val);
3023      return;
3024    }
3025
3026  cmode = GET_MODE (cplx);
3027  imode = GET_MODE_INNER (cmode);
3028  ibitsize = GET_MODE_BITSIZE (imode);
3029
3030  /* For MEMs simplify_gen_subreg may generate an invalid new address
3031     because, e.g., the original address is considered mode-dependent
3032     by the target, which restricts simplify_subreg from invoking
3033     adjust_address_nv.  Instead of preparing fallback support for an
3034     invalid address, we call adjust_address_nv directly.  */
3035  if (MEM_P (cplx))
3036    {
3037      emit_move_insn (adjust_address_nv (cplx, imode,
3038					 imag_p ? GET_MODE_SIZE (imode) : 0),
3039		      val);
3040      return;
3041    }
3042
3043  /* If the sub-object is at least word sized, then we know that subregging
3044     will work.  This special case is important, since store_bit_field
3045     wants to operate on integer modes, and there's rarely an OImode to
3046     correspond to TCmode.  */
3047  if (ibitsize >= BITS_PER_WORD
3048      /* For hard regs we have exact predicates.  Assume we can split
3049	 the original object if it spans an even number of hard regs.
3050	 This special case is important for SCmode on 64-bit platforms
3051	 where the natural size of floating-point regs is 32-bit.  */
3052      || (REG_P (cplx)
3053	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3054	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3055    {
3056      rtx part = simplify_gen_subreg (imode, cplx, cmode,
3057				      imag_p ? GET_MODE_SIZE (imode) : 0);
3058      if (part)
3059        {
3060	  emit_move_insn (part, val);
3061	  return;
3062	}
3063      else
3064	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3065	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3066    }
3067
3068  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3069}
3070
3071/* Extract one of the components of the complex value CPLX.  Extract the
3072   real part if IMAG_P is false, and the imaginary part if it's true.  */
3073
3074static rtx
3075read_complex_part (rtx cplx, bool imag_p)
3076{
3077  machine_mode cmode, imode;
3078  unsigned ibitsize;
3079
3080  if (GET_CODE (cplx) == CONCAT)
3081    return XEXP (cplx, imag_p);
3082
3083  cmode = GET_MODE (cplx);
3084  imode = GET_MODE_INNER (cmode);
3085  ibitsize = GET_MODE_BITSIZE (imode);
3086
3087  /* Special case reads from complex constants that got spilled to memory.  */
3088  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3089    {
3090      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3091      if (decl && TREE_CODE (decl) == COMPLEX_CST)
3092	{
3093	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3094	  if (CONSTANT_CLASS_P (part))
3095	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3096	}
3097    }
3098
3099  /* For MEMs simplify_gen_subreg may generate an invalid new address
3100     because, e.g., the original address is considered mode-dependent
3101     by the target, which restricts simplify_subreg from invoking
3102     adjust_address_nv.  Instead of preparing fallback support for an
3103     invalid address, we call adjust_address_nv directly.  */
3104  if (MEM_P (cplx))
3105    return adjust_address_nv (cplx, imode,
3106			      imag_p ? GET_MODE_SIZE (imode) : 0);
3107
3108  /* If the sub-object is at least word sized, then we know that subregging
3109     will work.  This special case is important, since extract_bit_field
3110     wants to operate on integer modes, and there's rarely an OImode to
3111     correspond to TCmode.  */
3112  if (ibitsize >= BITS_PER_WORD
3113      /* For hard regs we have exact predicates.  Assume we can split
3114	 the original object if it spans an even number of hard regs.
3115	 This special case is important for SCmode on 64-bit platforms
3116	 where the natural size of floating-point regs is 32-bit.  */
3117      || (REG_P (cplx)
3118	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3119	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3120    {
3121      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3122				     imag_p ? GET_MODE_SIZE (imode) : 0);
3123      if (ret)
3124        return ret;
3125      else
3126	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3127	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3128    }
3129
3130  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3131			    true, NULL_RTX, imode, imode);
3132}
3133
3134/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
3135   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
3136   represented in NEW_MODE.  If FORCE is true, this will never happen, as
3137   we'll force-create a SUBREG if needed.  */
3138
3139static rtx
3140emit_move_change_mode (machine_mode new_mode,
3141		       machine_mode old_mode, rtx x, bool force)
3142{
3143  rtx ret;
3144
3145  if (push_operand (x, GET_MODE (x)))
3146    {
3147      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3148      MEM_COPY_ATTRIBUTES (ret, x);
3149    }
3150  else if (MEM_P (x))
3151    {
3152      /* We don't have to worry about changing the address since the
3153	 size in bytes is supposed to be the same.  */
3154      if (reload_in_progress)
3155	{
3156	  /* Copy the MEM to change the mode and move any
3157	     substitutions from the old MEM to the new one.  */
3158	  ret = adjust_address_nv (x, new_mode, 0);
3159	  copy_replacements (x, ret);
3160	}
3161      else
3162	ret = adjust_address (x, new_mode, 0);
3163    }
3164  else
3165    {
3166      /* Note that we do want simplify_subreg's behavior of validating
3167	 that the new mode is ok for a hard register.  If we were to use
3168	 simplify_gen_subreg, we would create the subreg, but would
3169	 probably run into the target not being able to implement it.  */
3170      /* Except, of course, when FORCE is true, when this is exactly what
3171	 we want.  Which is needed for CCmodes on some targets.  */
3172      if (force)
3173	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3174      else
3175	ret = simplify_subreg (new_mode, x, old_mode, 0);
3176    }
3177
3178  return ret;
3179}
3180
3181/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3182   an integer mode of the same size as MODE.  Returns the instruction
3183   emitted, or NULL if such a move could not be generated.  */
3184
3185static rtx_insn *
3186emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3187{
3188  machine_mode imode;
3189  enum insn_code code;
3190
3191  /* There must exist a mode of the exact size we require.  */
3192  imode = int_mode_for_mode (mode);
3193  if (imode == BLKmode)
3194    return NULL;
3195
3196  /* The target must support moves in this mode.  */
3197  code = optab_handler (mov_optab, imode);
3198  if (code == CODE_FOR_nothing)
3199    return NULL;
3200
3201  x = emit_move_change_mode (imode, mode, x, force);
3202  if (x == NULL_RTX)
3203    return NULL;
3204  y = emit_move_change_mode (imode, mode, y, force);
3205  if (y == NULL_RTX)
3206    return NULL;
3207  return emit_insn (GEN_FCN (code) (x, y));
3208}
3209
3210/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3211   Return an equivalent MEM that does not use an auto-increment.  */
3212
3213rtx
3214emit_move_resolve_push (machine_mode mode, rtx x)
3215{
3216  enum rtx_code code = GET_CODE (XEXP (x, 0));
3217  HOST_WIDE_INT adjust;
3218  rtx temp;
3219
3220  adjust = GET_MODE_SIZE (mode);
3221#ifdef PUSH_ROUNDING
3222  adjust = PUSH_ROUNDING (adjust);
3223#endif
3224  if (code == PRE_DEC || code == POST_DEC)
3225    adjust = -adjust;
3226  else if (code == PRE_MODIFY || code == POST_MODIFY)
3227    {
3228      rtx expr = XEXP (XEXP (x, 0), 1);
3229      HOST_WIDE_INT val;
3230
3231      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3232      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3233      val = INTVAL (XEXP (expr, 1));
3234      if (GET_CODE (expr) == MINUS)
3235	val = -val;
3236      gcc_assert (adjust == val || adjust == -val);
3237      adjust = val;
3238    }
3239
3240  /* Do not use anti_adjust_stack, since we don't want to update
3241     stack_pointer_delta.  */
3242  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3243			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3244			      0, OPTAB_LIB_WIDEN);
3245  if (temp != stack_pointer_rtx)
3246    emit_move_insn (stack_pointer_rtx, temp);
3247
3248  switch (code)
3249    {
3250    case PRE_INC:
3251    case PRE_DEC:
3252    case PRE_MODIFY:
3253      temp = stack_pointer_rtx;
3254      break;
3255    case POST_INC:
3256    case POST_DEC:
3257    case POST_MODIFY:
3258      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3259      break;
3260    default:
3261      gcc_unreachable ();
3262    }
3263
3264  return replace_equiv_address (x, temp);
3265}
3266
3267/* A subroutine of emit_move_complex.  Generate a move from Y into X.
3268   X is known to satisfy push_operand, and MODE is known to be complex.
3269   Returns the last instruction emitted.  */
3270
3271rtx_insn *
3272emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3273{
3274  machine_mode submode = GET_MODE_INNER (mode);
3275  bool imag_first;
3276
3277#ifdef PUSH_ROUNDING
3278  unsigned int submodesize = GET_MODE_SIZE (submode);
3279
3280  /* In case we output to the stack, but the size is smaller than the
3281     machine can push exactly, we need to use move instructions.  */
3282  if (PUSH_ROUNDING (submodesize) != submodesize)
3283    {
3284      x = emit_move_resolve_push (mode, x);
3285      return emit_move_insn (x, y);
3286    }
3287#endif
3288
3289  /* Note that the real part always precedes the imag part in memory
3290     regardless of machine's endianness.  */
3291  switch (GET_CODE (XEXP (x, 0)))
3292    {
3293    case PRE_DEC:
3294    case POST_DEC:
3295      imag_first = true;
3296      break;
3297    case PRE_INC:
3298    case POST_INC:
3299      imag_first = false;
3300      break;
3301    default:
3302      gcc_unreachable ();
3303    }
3304
3305  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3306		  read_complex_part (y, imag_first));
3307  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3308			 read_complex_part (y, !imag_first));
3309}
3310
3311/* A subroutine of emit_move_complex.  Perform the move from Y to X
3312   via two moves of the parts.  Returns the last instruction emitted.  */
3313
3314rtx_insn *
3315emit_move_complex_parts (rtx x, rtx y)
3316{
3317  /* Show the output dies here.  This is necessary for SUBREGs
3318     of pseudos since we cannot track their lifetimes correctly;
3319     hard regs shouldn't appear here except as return values.  */
3320  if (!reload_completed && !reload_in_progress
3321      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3322    emit_clobber (x);
3323
3324  write_complex_part (x, read_complex_part (y, false), false);
3325  write_complex_part (x, read_complex_part (y, true), true);
3326
3327  return get_last_insn ();
3328}
3329
3330/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3331   MODE is known to be complex.  Returns the last instruction emitted.  */
3332
3333static rtx_insn *
3334emit_move_complex (machine_mode mode, rtx x, rtx y)
3335{
3336  bool try_int;
3337
3338  /* Need to take special care for pushes, to maintain proper ordering
3339     of the data, and possibly extra padding.  */
3340  if (push_operand (x, mode))
3341    return emit_move_complex_push (mode, x, y);
3342
3343  /* See if we can coerce the target into moving both values at once, except
3344     for floating point where we favor moving as parts if this is easy.  */
3345  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3346      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3347      && !(REG_P (x)
3348	   && HARD_REGISTER_P (x)
3349	   && hard_regno_nregs[REGNO (x)][mode] == 1)
3350      && !(REG_P (y)
3351	   && HARD_REGISTER_P (y)
3352	   && hard_regno_nregs[REGNO (y)][mode] == 1))
3353    try_int = false;
3354  /* Not possible if the values are inherently not adjacent.  */
3355  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3356    try_int = false;
3357  /* Is possible if both are registers (or subregs of registers).  */
3358  else if (register_operand (x, mode) && register_operand (y, mode))
3359    try_int = true;
3360  /* If one of the operands is a memory, and alignment constraints
3361     are friendly enough, we may be able to do combined memory operations.
3362     We do not attempt this if Y is a constant because that combination is
3363     usually better with the by-parts thing below.  */
3364  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3365	   && (!STRICT_ALIGNMENT
3366	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3367    try_int = true;
3368  else
3369    try_int = false;
3370
3371  if (try_int)
3372    {
3373      rtx_insn *ret;
3374
3375      /* For memory to memory moves, optimal behavior can be had with the
3376	 existing block move logic.  */
3377      if (MEM_P (x) && MEM_P (y))
3378	{
3379	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3380			   BLOCK_OP_NO_LIBCALL);
3381	  return get_last_insn ();
3382	}
3383
3384      ret = emit_move_via_integer (mode, x, y, true);
3385      if (ret)
3386	return ret;
3387    }
3388
3389  return emit_move_complex_parts (x, y);
3390}
3391
3392/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3393   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3394
3395static rtx_insn *
3396emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3397{
3398  rtx_insn *ret;
3399
3400  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3401  if (mode != CCmode)
3402    {
3403      enum insn_code code = optab_handler (mov_optab, CCmode);
3404      if (code != CODE_FOR_nothing)
3405	{
3406	  x = emit_move_change_mode (CCmode, mode, x, true);
3407	  y = emit_move_change_mode (CCmode, mode, y, true);
3408	  return emit_insn (GEN_FCN (code) (x, y));
3409	}
3410    }
3411
3412  /* Otherwise, find the MODE_INT mode of the same width.  */
3413  ret = emit_move_via_integer (mode, x, y, false);
3414  gcc_assert (ret != NULL);
3415  return ret;
3416}
3417
3418/* Return true if word I of OP lies entirely in the
3419   undefined bits of a paradoxical subreg.  */
3420
3421static bool
3422undefined_operand_subword_p (const_rtx op, int i)
3423{
3424  machine_mode innermode, innermostmode;
3425  int offset;
3426  if (GET_CODE (op) != SUBREG)
3427    return false;
3428  innermode = GET_MODE (op);
3429  innermostmode = GET_MODE (SUBREG_REG (op));
3430  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3431  /* The SUBREG_BYTE represents offset, as if the value were stored in
3432     memory, except for a paradoxical subreg where we define
3433     SUBREG_BYTE to be 0; undo this exception as in
3434     simplify_subreg.  */
3435  if (SUBREG_BYTE (op) == 0
3436      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3437    {
3438      int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3439      if (WORDS_BIG_ENDIAN)
3440	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3441      if (BYTES_BIG_ENDIAN)
3442	offset += difference % UNITS_PER_WORD;
3443    }
3444  if (offset >= GET_MODE_SIZE (innermostmode)
3445      || offset <= -GET_MODE_SIZE (word_mode))
3446    return true;
3447  return false;
3448}
3449
3450/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3451   MODE is any multi-word or full-word mode that lacks a move_insn
3452   pattern.  Note that you will get better code if you define such
3453   patterns, even if they must turn into multiple assembler instructions.  */
3454
3455static rtx_insn *
3456emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3457{
3458  rtx_insn *last_insn = 0;
3459  rtx_insn *seq;
3460  rtx inner;
3461  bool need_clobber;
3462  int i;
3463
3464  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3465
3466  /* If X is a push on the stack, do the push now and replace
3467     X with a reference to the stack pointer.  */
3468  if (push_operand (x, mode))
3469    x = emit_move_resolve_push (mode, x);
3470
3471  /* If we are in reload, see if either operand is a MEM whose address
3472     is scheduled for replacement.  */
3473  if (reload_in_progress && MEM_P (x)
3474      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3475    x = replace_equiv_address_nv (x, inner);
3476  if (reload_in_progress && MEM_P (y)
3477      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3478    y = replace_equiv_address_nv (y, inner);
3479
3480  start_sequence ();
3481
3482  need_clobber = false;
3483  for (i = 0;
3484       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3485       i++)
3486    {
3487      rtx xpart = operand_subword (x, i, 1, mode);
3488      rtx ypart;
3489
3490      /* Do not generate code for a move if it would come entirely
3491	 from the undefined bits of a paradoxical subreg.  */
3492      if (undefined_operand_subword_p (y, i))
3493	continue;
3494
3495      ypart = operand_subword (y, i, 1, mode);
3496
3497      /* If we can't get a part of Y, put Y into memory if it is a
3498	 constant.  Otherwise, force it into a register.  Then we must
3499	 be able to get a part of Y.  */
3500      if (ypart == 0 && CONSTANT_P (y))
3501	{
3502	  y = use_anchored_address (force_const_mem (mode, y));
3503	  ypart = operand_subword (y, i, 1, mode);
3504	}
3505      else if (ypart == 0)
3506	ypart = operand_subword_force (y, i, mode);
3507
3508      gcc_assert (xpart && ypart);
3509
3510      need_clobber |= (GET_CODE (xpart) == SUBREG);
3511
3512      last_insn = emit_move_insn (xpart, ypart);
3513    }
3514
3515  seq = get_insns ();
3516  end_sequence ();
3517
3518  /* Show the output dies here.  This is necessary for SUBREGs
3519     of pseudos since we cannot track their lifetimes correctly;
3520     hard regs shouldn't appear here except as return values.
3521     We never want to emit such a clobber after reload.  */
3522  if (x != y
3523      && ! (reload_in_progress || reload_completed)
3524      && need_clobber != 0)
3525    emit_clobber (x);
3526
3527  emit_insn (seq);
3528
3529  return last_insn;
3530}
3531
3532/* Low level part of emit_move_insn.
3533   Called just like emit_move_insn, but assumes X and Y
3534   are basically valid.  */
3535
3536rtx_insn *
3537emit_move_insn_1 (rtx x, rtx y)
3538{
3539  machine_mode mode = GET_MODE (x);
3540  enum insn_code code;
3541
3542  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3543
3544  code = optab_handler (mov_optab, mode);
3545  if (code != CODE_FOR_nothing)
3546    return emit_insn (GEN_FCN (code) (x, y));
3547
3548  /* Expand complex moves by moving real part and imag part.  */
3549  if (COMPLEX_MODE_P (mode))
3550    return emit_move_complex (mode, x, y);
3551
3552  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3553      || ALL_FIXED_POINT_MODE_P (mode))
3554    {
3555      rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3556
3557      /* If we can't find an integer mode, use multi words.  */
3558      if (result)
3559	return result;
3560      else
3561	return emit_move_multi_word (mode, x, y);
3562    }
3563
3564  if (GET_MODE_CLASS (mode) == MODE_CC)
3565    return emit_move_ccmode (mode, x, y);
3566
3567  /* Try using a move pattern for the corresponding integer mode.  This is
3568     only safe when simplify_subreg can convert MODE constants into integer
3569     constants.  At present, it can only do this reliably if the value
3570     fits within a HOST_WIDE_INT.  */
3571  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3572    {
3573      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3574
3575      if (ret)
3576	{
3577	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3578	    return ret;
3579	}
3580    }
3581
3582  return emit_move_multi_word (mode, x, y);
3583}
3584
3585/* Generate code to copy Y into X.
3586   Both Y and X must have the same mode, except that
3587   Y can be a constant with VOIDmode.
3588   This mode cannot be BLKmode; use emit_block_move for that.
3589
3590   Return the last instruction emitted.  */
3591
3592rtx_insn *
3593emit_move_insn (rtx x, rtx y)
3594{
3595  machine_mode mode = GET_MODE (x);
3596  rtx y_cst = NULL_RTX;
3597  rtx_insn *last_insn;
3598  rtx set;
3599
3600  gcc_assert (mode != BLKmode
3601	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3602
3603  if (CONSTANT_P (y))
3604    {
3605      if (optimize
3606	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3607	  && (last_insn = compress_float_constant (x, y)))
3608	return last_insn;
3609
3610      y_cst = y;
3611
3612      if (!targetm.legitimate_constant_p (mode, y))
3613	{
3614	  y = force_const_mem (mode, y);
3615
3616	  /* If the target's cannot_force_const_mem prevented the spill,
3617	     assume that the target's move expanders will also take care
3618	     of the non-legitimate constant.  */
3619	  if (!y)
3620	    y = y_cst;
3621	  else
3622	    y = use_anchored_address (y);
3623	}
3624    }
3625
3626  /* If X or Y are memory references, verify that their addresses are valid
3627     for the machine.  */
3628  if (MEM_P (x)
3629      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3630					 MEM_ADDR_SPACE (x))
3631	  && ! push_operand (x, GET_MODE (x))))
3632    x = validize_mem (x);
3633
3634  if (MEM_P (y)
3635      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3636					MEM_ADDR_SPACE (y)))
3637    y = validize_mem (y);
3638
3639  gcc_assert (mode != BLKmode);
3640
3641  last_insn = emit_move_insn_1 (x, y);
3642
3643  if (y_cst && REG_P (x)
3644      && (set = single_set (last_insn)) != NULL_RTX
3645      && SET_DEST (set) == x
3646      && ! rtx_equal_p (y_cst, SET_SRC (set)))
3647    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3648
3649  return last_insn;
3650}
3651
3652/* Generate the body of an instruction to copy Y into X.
3653   It may be a list of insns, if one insn isn't enough.  */
3654
3655rtx
3656gen_move_insn (rtx x, rtx y)
3657{
3658  rtx_insn *seq;
3659
3660  start_sequence ();
3661  emit_move_insn_1 (x, y);
3662  seq = get_insns ();
3663  end_sequence ();
3664  return seq;
3665}
3666
3667/* If Y is representable exactly in a narrower mode, and the target can
3668   perform the extension directly from constant or memory, then emit the
3669   move as an extension.  */
3670
3671static rtx_insn *
3672compress_float_constant (rtx x, rtx y)
3673{
3674  machine_mode dstmode = GET_MODE (x);
3675  machine_mode orig_srcmode = GET_MODE (y);
3676  machine_mode srcmode;
3677  REAL_VALUE_TYPE r;
3678  int oldcost, newcost;
3679  bool speed = optimize_insn_for_speed_p ();
3680
3681  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3682
3683  if (targetm.legitimate_constant_p (dstmode, y))
3684    oldcost = set_src_cost (y, speed);
3685  else
3686    oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3687
3688  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3689       srcmode != orig_srcmode;
3690       srcmode = GET_MODE_WIDER_MODE (srcmode))
3691    {
3692      enum insn_code ic;
3693      rtx trunc_y;
3694      rtx_insn *last_insn;
3695
3696      /* Skip if the target can't extend this way.  */
3697      ic = can_extend_p (dstmode, srcmode, 0);
3698      if (ic == CODE_FOR_nothing)
3699	continue;
3700
3701      /* Skip if the narrowed value isn't exact.  */
3702      if (! exact_real_truncate (srcmode, &r))
3703	continue;
3704
3705      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3706
3707      if (targetm.legitimate_constant_p (srcmode, trunc_y))
3708	{
3709	  /* Skip if the target needs extra instructions to perform
3710	     the extension.  */
3711	  if (!insn_operand_matches (ic, 1, trunc_y))
3712	    continue;
3713	  /* This is valid, but may not be cheaper than the original. */
3714	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3715				  speed);
3716	  if (oldcost < newcost)
3717	    continue;
3718	}
3719      else if (float_extend_from_mem[dstmode][srcmode])
3720	{
3721	  trunc_y = force_const_mem (srcmode, trunc_y);
3722	  /* This is valid, but may not be cheaper than the original. */
3723	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3724				  speed);
3725	  if (oldcost < newcost)
3726	    continue;
3727	  trunc_y = validize_mem (trunc_y);
3728	}
3729      else
3730	continue;
3731
3732      /* For CSE's benefit, force the compressed constant pool entry
3733	 into a new pseudo.  This constant may be used in different modes,
3734	 and if not, combine will put things back together for us.  */
3735      trunc_y = force_reg (srcmode, trunc_y);
3736
3737      /* If x is a hard register, perform the extension into a pseudo,
3738	 so that e.g. stack realignment code is aware of it.  */
3739      rtx target = x;
3740      if (REG_P (x) && HARD_REGISTER_P (x))
3741	target = gen_reg_rtx (dstmode);
3742
3743      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3744      last_insn = get_last_insn ();
3745
3746      if (REG_P (target))
3747	set_unique_reg_note (last_insn, REG_EQUAL, y);
3748
3749      if (target != x)
3750	return emit_move_insn (x, target);
3751      return last_insn;
3752    }
3753
3754  return NULL;
3755}
3756
3757/* Pushing data onto the stack.  */
3758
3759/* Push a block of length SIZE (perhaps variable)
3760   and return an rtx to address the beginning of the block.
3761   The value may be virtual_outgoing_args_rtx.
3762
3763   EXTRA is the number of bytes of padding to push in addition to SIZE.
3764   BELOW nonzero means this padding comes at low addresses;
3765   otherwise, the padding comes at high addresses.  */
3766
3767rtx
3768push_block (rtx size, int extra, int below)
3769{
3770  rtx temp;
3771
3772  size = convert_modes (Pmode, ptr_mode, size, 1);
3773  if (CONSTANT_P (size))
3774    anti_adjust_stack (plus_constant (Pmode, size, extra));
3775  else if (REG_P (size) && extra == 0)
3776    anti_adjust_stack (size);
3777  else
3778    {
3779      temp = copy_to_mode_reg (Pmode, size);
3780      if (extra != 0)
3781	temp = expand_binop (Pmode, add_optab, temp,
3782			     gen_int_mode (extra, Pmode),
3783			     temp, 0, OPTAB_LIB_WIDEN);
3784      anti_adjust_stack (temp);
3785    }
3786
3787#ifndef STACK_GROWS_DOWNWARD
3788  if (0)
3789#else
3790  if (1)
3791#endif
3792    {
3793      temp = virtual_outgoing_args_rtx;
3794      if (extra != 0 && below)
3795	temp = plus_constant (Pmode, temp, extra);
3796    }
3797  else
3798    {
3799      if (CONST_INT_P (size))
3800	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3801			      -INTVAL (size) - (below ? 0 : extra));
3802      else if (extra != 0 && !below)
3803	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3804			     negate_rtx (Pmode, plus_constant (Pmode, size,
3805							       extra)));
3806      else
3807	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3808			     negate_rtx (Pmode, size));
3809    }
3810
3811  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3812}
3813
3814/* A utility routine that returns the base of an auto-inc memory, or NULL.  */
3815
3816static rtx
3817mem_autoinc_base (rtx mem)
3818{
3819  if (MEM_P (mem))
3820    {
3821      rtx addr = XEXP (mem, 0);
3822      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3823	return XEXP (addr, 0);
3824    }
3825  return NULL;
3826}
3827
3828/* A utility routine used here, in reload, and in try_split.  The insns
3829   after PREV up to and including LAST are known to adjust the stack,
3830   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
3831   placing notes as appropriate.  PREV may be NULL, indicating the
3832   entire insn sequence prior to LAST should be scanned.
3833
3834   The set of allowed stack pointer modifications is small:
3835     (1) One or more auto-inc style memory references (aka pushes),
3836     (2) One or more addition/subtraction with the SP as destination,
3837     (3) A single move insn with the SP as destination,
3838     (4) A call_pop insn,
3839     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3840
3841   Insns in the sequence that do not modify the SP are ignored,
3842   except for noreturn calls.
3843
3844   The return value is the amount of adjustment that can be trivially
3845   verified, via immediate operand or auto-inc.  If the adjustment
3846   cannot be trivially extracted, the return value is INT_MIN.  */
3847
3848HOST_WIDE_INT
3849find_args_size_adjust (rtx_insn *insn)
3850{
3851  rtx dest, set, pat;
3852  int i;
3853
3854  pat = PATTERN (insn);
3855  set = NULL;
3856
3857  /* Look for a call_pop pattern.  */
3858  if (CALL_P (insn))
3859    {
3860      /* We have to allow non-call_pop patterns for the case
3861	 of emit_single_push_insn of a TLS address.  */
3862      if (GET_CODE (pat) != PARALLEL)
3863	return 0;
3864
3865      /* All call_pop have a stack pointer adjust in the parallel.
3866	 The call itself is always first, and the stack adjust is
3867	 usually last, so search from the end.  */
3868      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3869	{
3870	  set = XVECEXP (pat, 0, i);
3871	  if (GET_CODE (set) != SET)
3872	    continue;
3873	  dest = SET_DEST (set);
3874	  if (dest == stack_pointer_rtx)
3875	    break;
3876	}
3877      /* We'd better have found the stack pointer adjust.  */
3878      if (i == 0)
3879	return 0;
3880      /* Fall through to process the extracted SET and DEST
3881	 as if it was a standalone insn.  */
3882    }
3883  else if (GET_CODE (pat) == SET)
3884    set = pat;
3885  else if ((set = single_set (insn)) != NULL)
3886    ;
3887  else if (GET_CODE (pat) == PARALLEL)
3888    {
3889      /* ??? Some older ports use a parallel with a stack adjust
3890	 and a store for a PUSH_ROUNDING pattern, rather than a
3891	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
3892      /* ??? See h8300 and m68k, pushqi1.  */
3893      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3894	{
3895	  set = XVECEXP (pat, 0, i);
3896	  if (GET_CODE (set) != SET)
3897	    continue;
3898	  dest = SET_DEST (set);
3899	  if (dest == stack_pointer_rtx)
3900	    break;
3901
3902	  /* We do not expect an auto-inc of the sp in the parallel.  */
3903	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3904	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3905			       != stack_pointer_rtx);
3906	}
3907      if (i < 0)
3908	return 0;
3909    }
3910  else
3911    return 0;
3912
3913  dest = SET_DEST (set);
3914
3915  /* Look for direct modifications of the stack pointer.  */
3916  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3917    {
3918      /* Look for a trivial adjustment, otherwise assume nothing.  */
3919      /* Note that the SPU restore_stack_block pattern refers to
3920	 the stack pointer in V4SImode.  Consider that non-trivial.  */
3921      if (SCALAR_INT_MODE_P (GET_MODE (dest))
3922	  && GET_CODE (SET_SRC (set)) == PLUS
3923	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3924	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3925	return INTVAL (XEXP (SET_SRC (set), 1));
3926      /* ??? Reload can generate no-op moves, which will be cleaned
3927	 up later.  Recognize it and continue searching.  */
3928      else if (rtx_equal_p (dest, SET_SRC (set)))
3929	return 0;
3930      else
3931	return HOST_WIDE_INT_MIN;
3932    }
3933  else
3934    {
3935      rtx mem, addr;
3936
3937      /* Otherwise only think about autoinc patterns.  */
3938      if (mem_autoinc_base (dest) == stack_pointer_rtx)
3939	{
3940	  mem = dest;
3941	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3942			       != stack_pointer_rtx);
3943	}
3944      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3945	mem = SET_SRC (set);
3946      else
3947	return 0;
3948
3949      addr = XEXP (mem, 0);
3950      switch (GET_CODE (addr))
3951	{
3952	case PRE_INC:
3953	case POST_INC:
3954	  return GET_MODE_SIZE (GET_MODE (mem));
3955	case PRE_DEC:
3956	case POST_DEC:
3957	  return -GET_MODE_SIZE (GET_MODE (mem));
3958	case PRE_MODIFY:
3959	case POST_MODIFY:
3960	  addr = XEXP (addr, 1);
3961	  gcc_assert (GET_CODE (addr) == PLUS);
3962	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3963	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3964	  return INTVAL (XEXP (addr, 1));
3965	default:
3966	  gcc_unreachable ();
3967	}
3968    }
3969}
3970
3971int
3972fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3973{
3974  int args_size = end_args_size;
3975  bool saw_unknown = false;
3976  rtx_insn *insn;
3977
3978  for (insn = last; insn != prev; insn = PREV_INSN (insn))
3979    {
3980      HOST_WIDE_INT this_delta;
3981
3982      if (!NONDEBUG_INSN_P (insn))
3983	continue;
3984
3985      this_delta = find_args_size_adjust (insn);
3986      if (this_delta == 0)
3987	{
3988	  if (!CALL_P (insn)
3989	      || ACCUMULATE_OUTGOING_ARGS
3990	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3991	    continue;
3992	}
3993
3994      gcc_assert (!saw_unknown);
3995      if (this_delta == HOST_WIDE_INT_MIN)
3996	saw_unknown = true;
3997
3998      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3999#ifdef STACK_GROWS_DOWNWARD
4000      this_delta = -(unsigned HOST_WIDE_INT) this_delta;
4001#endif
4002      args_size -= this_delta;
4003    }
4004
4005  return saw_unknown ? INT_MIN : args_size;
4006}
4007
4008#ifdef PUSH_ROUNDING
4009/* Emit single push insn.  */
4010
4011static void
4012emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4013{
4014  rtx dest_addr;
4015  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4016  rtx dest;
4017  enum insn_code icode;
4018
4019  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4020  /* If there is push pattern, use it.  Otherwise try old way of throwing
4021     MEM representing push operation to move expander.  */
4022  icode = optab_handler (push_optab, mode);
4023  if (icode != CODE_FOR_nothing)
4024    {
4025      struct expand_operand ops[1];
4026
4027      create_input_operand (&ops[0], x, mode);
4028      if (maybe_expand_insn (icode, 1, ops))
4029	return;
4030    }
4031  if (GET_MODE_SIZE (mode) == rounded_size)
4032    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4033  /* If we are to pad downward, adjust the stack pointer first and
4034     then store X into the stack location using an offset.  This is
4035     because emit_move_insn does not know how to pad; it does not have
4036     access to type.  */
4037  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4038    {
4039      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4040      HOST_WIDE_INT offset;
4041
4042      emit_move_insn (stack_pointer_rtx,
4043		      expand_binop (Pmode,
4044#ifdef STACK_GROWS_DOWNWARD
4045				    sub_optab,
4046#else
4047				    add_optab,
4048#endif
4049				    stack_pointer_rtx,
4050				    gen_int_mode (rounded_size, Pmode),
4051				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
4052
4053      offset = (HOST_WIDE_INT) padding_size;
4054#ifdef STACK_GROWS_DOWNWARD
4055      if (STACK_PUSH_CODE == POST_DEC)
4056	/* We have already decremented the stack pointer, so get the
4057	   previous value.  */
4058	offset += (HOST_WIDE_INT) rounded_size;
4059#else
4060      if (STACK_PUSH_CODE == POST_INC)
4061	/* We have already incremented the stack pointer, so get the
4062	   previous value.  */
4063	offset -= (HOST_WIDE_INT) rounded_size;
4064#endif
4065      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4066				gen_int_mode (offset, Pmode));
4067    }
4068  else
4069    {
4070#ifdef STACK_GROWS_DOWNWARD
4071      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
4072      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4073				gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4074					      Pmode));
4075#else
4076      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
4077      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4078				gen_int_mode (rounded_size, Pmode));
4079#endif
4080      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4081    }
4082
4083  dest = gen_rtx_MEM (mode, dest_addr);
4084
4085  if (type != 0)
4086    {
4087      set_mem_attributes (dest, type, 1);
4088
4089      if (cfun->tail_call_marked)
4090	/* Function incoming arguments may overlap with sibling call
4091	   outgoing arguments and we cannot allow reordering of reads
4092	   from function arguments with stores to outgoing arguments
4093	   of sibling calls.  */
4094	set_mem_alias_set (dest, 0);
4095    }
4096  emit_move_insn (dest, x);
4097}
4098
4099/* Emit and annotate a single push insn.  */
4100
4101static void
4102emit_single_push_insn (machine_mode mode, rtx x, tree type)
4103{
4104  int delta, old_delta = stack_pointer_delta;
4105  rtx_insn *prev = get_last_insn ();
4106  rtx_insn *last;
4107
4108  emit_single_push_insn_1 (mode, x, type);
4109
4110  last = get_last_insn ();
4111
4112  /* Notice the common case where we emitted exactly one insn.  */
4113  if (PREV_INSN (last) == prev)
4114    {
4115      add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4116      return;
4117    }
4118
4119  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4120  gcc_assert (delta == INT_MIN || delta == old_delta);
4121}
4122#endif
4123
4124/* Generate code to push X onto the stack, assuming it has mode MODE and
4125   type TYPE.
4126   MODE is redundant except when X is a CONST_INT (since they don't
4127   carry mode info).
4128   SIZE is an rtx for the size of data to be copied (in bytes),
4129   needed only if X is BLKmode.
4130
4131   ALIGN (in bits) is maximum alignment we can assume.
4132
4133   If PARTIAL and REG are both nonzero, then copy that many of the first
4134   bytes of X into registers starting with REG, and push the rest of X.
4135   The amount of space pushed is decreased by PARTIAL bytes.
4136   REG must be a hard register in this case.
4137   If REG is zero but PARTIAL is not, take any all others actions for an
4138   argument partially in registers, but do not actually load any
4139   registers.
4140
4141   EXTRA is the amount in bytes of extra space to leave next to this arg.
4142   This is ignored if an argument block has already been allocated.
4143
4144   On a machine that lacks real push insns, ARGS_ADDR is the address of
4145   the bottom of the argument block for this call.  We use indexing off there
4146   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
4147   argument block has not been preallocated.
4148
4149   ARGS_SO_FAR is the size of args previously pushed for this call.
4150
4151   REG_PARM_STACK_SPACE is nonzero if functions require stack space
4152   for arguments passed in registers.  If nonzero, it will be the number
4153   of bytes required.  */
4154
4155void
4156emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4157		unsigned int align, int partial, rtx reg, int extra,
4158		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4159		rtx alignment_pad)
4160{
4161  rtx xinner;
4162  enum direction stack_direction
4163#ifdef STACK_GROWS_DOWNWARD
4164    = downward;
4165#else
4166    = upward;
4167#endif
4168
4169  /* Decide where to pad the argument: `downward' for below,
4170     `upward' for above, or `none' for don't pad it.
4171     Default is below for small data on big-endian machines; else above.  */
4172  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4173
4174  /* Invert direction if stack is post-decrement.
4175     FIXME: why?  */
4176  if (STACK_PUSH_CODE == POST_DEC)
4177    if (where_pad != none)
4178      where_pad = (where_pad == downward ? upward : downward);
4179
4180  xinner = x;
4181
4182  if (mode == BLKmode
4183      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4184    {
4185      /* Copy a block into the stack, entirely or partially.  */
4186
4187      rtx temp;
4188      int used;
4189      int offset;
4190      int skip;
4191
4192      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4193      used = partial - offset;
4194
4195      if (mode != BLKmode)
4196	{
4197	  /* A value is to be stored in an insufficiently aligned
4198	     stack slot; copy via a suitably aligned slot if
4199	     necessary.  */
4200	  size = GEN_INT (GET_MODE_SIZE (mode));
4201	  if (!MEM_P (xinner))
4202	    {
4203	      temp = assign_temp (type, 1, 1);
4204	      emit_move_insn (temp, xinner);
4205	      xinner = temp;
4206	    }
4207	}
4208
4209      gcc_assert (size);
4210
4211      /* USED is now the # of bytes we need not copy to the stack
4212	 because registers will take care of them.  */
4213
4214      if (partial != 0)
4215	xinner = adjust_address (xinner, BLKmode, used);
4216
4217      /* If the partial register-part of the arg counts in its stack size,
4218	 skip the part of stack space corresponding to the registers.
4219	 Otherwise, start copying to the beginning of the stack space,
4220	 by setting SKIP to 0.  */
4221      skip = (reg_parm_stack_space == 0) ? 0 : used;
4222
4223#ifdef PUSH_ROUNDING
4224      /* Do it with several push insns if that doesn't take lots of insns
4225	 and if there is no difficulty with push insns that skip bytes
4226	 on the stack for alignment purposes.  */
4227      if (args_addr == 0
4228	  && PUSH_ARGS
4229	  && CONST_INT_P (size)
4230	  && skip == 0
4231	  && MEM_ALIGN (xinner) >= align
4232	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4233	  /* Here we avoid the case of a structure whose weak alignment
4234	     forces many pushes of a small amount of data,
4235	     and such small pushes do rounding that causes trouble.  */
4236	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4237	      || align >= BIGGEST_ALIGNMENT
4238	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4239		  == (align / BITS_PER_UNIT)))
4240	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4241	{
4242	  /* Push padding now if padding above and stack grows down,
4243	     or if padding below and stack grows up.
4244	     But if space already allocated, this has already been done.  */
4245	  if (extra && args_addr == 0
4246	      && where_pad != none && where_pad != stack_direction)
4247	    anti_adjust_stack (GEN_INT (extra));
4248
4249	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4250	}
4251      else
4252#endif /* PUSH_ROUNDING  */
4253	{
4254	  rtx target;
4255
4256	  /* Otherwise make space on the stack and copy the data
4257	     to the address of that space.  */
4258
4259	  /* Deduct words put into registers from the size we must copy.  */
4260	  if (partial != 0)
4261	    {
4262	      if (CONST_INT_P (size))
4263		size = GEN_INT (INTVAL (size) - used);
4264	      else
4265		size = expand_binop (GET_MODE (size), sub_optab, size,
4266				     gen_int_mode (used, GET_MODE (size)),
4267				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
4268	    }
4269
4270	  /* Get the address of the stack space.
4271	     In this case, we do not deal with EXTRA separately.
4272	     A single stack adjust will do.  */
4273	  if (! args_addr)
4274	    {
4275	      temp = push_block (size, extra, where_pad == downward);
4276	      extra = 0;
4277	    }
4278	  else if (CONST_INT_P (args_so_far))
4279	    temp = memory_address (BLKmode,
4280				   plus_constant (Pmode, args_addr,
4281						  skip + INTVAL (args_so_far)));
4282	  else
4283	    temp = memory_address (BLKmode,
4284				   plus_constant (Pmode,
4285						  gen_rtx_PLUS (Pmode,
4286								args_addr,
4287								args_so_far),
4288						  skip));
4289
4290	  if (!ACCUMULATE_OUTGOING_ARGS)
4291	    {
4292	      /* If the source is referenced relative to the stack pointer,
4293		 copy it to another register to stabilize it.  We do not need
4294		 to do this if we know that we won't be changing sp.  */
4295
4296	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4297		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4298		temp = copy_to_reg (temp);
4299	    }
4300
4301	  target = gen_rtx_MEM (BLKmode, temp);
4302
4303	  /* We do *not* set_mem_attributes here, because incoming arguments
4304	     may overlap with sibling call outgoing arguments and we cannot
4305	     allow reordering of reads from function arguments with stores
4306	     to outgoing arguments of sibling calls.  We do, however, want
4307	     to record the alignment of the stack slot.  */
4308	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4309	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4310	  set_mem_align (target, align);
4311
4312	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4313	}
4314    }
4315  else if (partial > 0)
4316    {
4317      /* Scalar partly in registers.  */
4318
4319      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4320      int i;
4321      int not_stack;
4322      /* # bytes of start of argument
4323	 that we must make space for but need not store.  */
4324      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4325      int args_offset = INTVAL (args_so_far);
4326      int skip;
4327
4328      /* Push padding now if padding above and stack grows down,
4329	 or if padding below and stack grows up.
4330	 But if space already allocated, this has already been done.  */
4331      if (extra && args_addr == 0
4332	  && where_pad != none && where_pad != stack_direction)
4333	anti_adjust_stack (GEN_INT (extra));
4334
4335      /* If we make space by pushing it, we might as well push
4336	 the real data.  Otherwise, we can leave OFFSET nonzero
4337	 and leave the space uninitialized.  */
4338      if (args_addr == 0)
4339	offset = 0;
4340
4341      /* Now NOT_STACK gets the number of words that we don't need to
4342	 allocate on the stack.  Convert OFFSET to words too.  */
4343      not_stack = (partial - offset) / UNITS_PER_WORD;
4344      offset /= UNITS_PER_WORD;
4345
4346      /* If the partial register-part of the arg counts in its stack size,
4347	 skip the part of stack space corresponding to the registers.
4348	 Otherwise, start copying to the beginning of the stack space,
4349	 by setting SKIP to 0.  */
4350      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4351
4352      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4353	x = validize_mem (force_const_mem (mode, x));
4354
4355      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4356	 SUBREGs of such registers are not allowed.  */
4357      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4358	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4359	x = copy_to_reg (x);
4360
4361      /* Loop over all the words allocated on the stack for this arg.  */
4362      /* We can do it by words, because any scalar bigger than a word
4363	 has a size a multiple of a word.  */
4364      for (i = size - 1; i >= not_stack; i--)
4365	if (i >= not_stack + offset)
4366	  emit_push_insn (operand_subword_force (x, i, mode),
4367			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4368			  0, args_addr,
4369			  GEN_INT (args_offset + ((i - not_stack + skip)
4370						  * UNITS_PER_WORD)),
4371			  reg_parm_stack_space, alignment_pad);
4372    }
4373  else
4374    {
4375      rtx addr;
4376      rtx dest;
4377
4378      /* Push padding now if padding above and stack grows down,
4379	 or if padding below and stack grows up.
4380	 But if space already allocated, this has already been done.  */
4381      if (extra && args_addr == 0
4382	  && where_pad != none && where_pad != stack_direction)
4383	anti_adjust_stack (GEN_INT (extra));
4384
4385#ifdef PUSH_ROUNDING
4386      if (args_addr == 0 && PUSH_ARGS)
4387	emit_single_push_insn (mode, x, type);
4388      else
4389#endif
4390	{
4391	  if (CONST_INT_P (args_so_far))
4392	    addr
4393	      = memory_address (mode,
4394				plus_constant (Pmode, args_addr,
4395					       INTVAL (args_so_far)));
4396	  else
4397	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4398						       args_so_far));
4399	  dest = gen_rtx_MEM (mode, addr);
4400
4401	  /* We do *not* set_mem_attributes here, because incoming arguments
4402	     may overlap with sibling call outgoing arguments and we cannot
4403	     allow reordering of reads from function arguments with stores
4404	     to outgoing arguments of sibling calls.  We do, however, want
4405	     to record the alignment of the stack slot.  */
4406	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4407	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4408	  set_mem_align (dest, align);
4409
4410	  emit_move_insn (dest, x);
4411	}
4412    }
4413
4414  /* If part should go in registers, copy that part
4415     into the appropriate registers.  Do this now, at the end,
4416     since mem-to-mem copies above may do function calls.  */
4417  if (partial > 0 && reg != 0)
4418    {
4419      /* Handle calls that pass values in multiple non-contiguous locations.
4420	 The Irix 6 ABI has examples of this.  */
4421      if (GET_CODE (reg) == PARALLEL)
4422	emit_group_load (reg, x, type, -1);
4423      else
4424	{
4425	  gcc_assert (partial % UNITS_PER_WORD == 0);
4426	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4427	}
4428    }
4429
4430  if (extra && args_addr == 0 && where_pad == stack_direction)
4431    anti_adjust_stack (GEN_INT (extra));
4432
4433  if (alignment_pad && args_addr == 0)
4434    anti_adjust_stack (alignment_pad);
4435}
4436
4437/* Return X if X can be used as a subtarget in a sequence of arithmetic
4438   operations.  */
4439
4440static rtx
4441get_subtarget (rtx x)
4442{
4443  return (optimize
4444          || x == 0
4445	   /* Only registers can be subtargets.  */
4446	   || !REG_P (x)
4447	   /* Don't use hard regs to avoid extending their life.  */
4448	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4449	  ? 0 : x);
4450}
4451
4452/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4453   FIELD is a bitfield.  Returns true if the optimization was successful,
4454   and there's nothing else to do.  */
4455
4456static bool
4457optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4458				 unsigned HOST_WIDE_INT bitpos,
4459				 unsigned HOST_WIDE_INT bitregion_start,
4460				 unsigned HOST_WIDE_INT bitregion_end,
4461				 machine_mode mode1, rtx str_rtx,
4462				 tree to, tree src)
4463{
4464  machine_mode str_mode = GET_MODE (str_rtx);
4465  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4466  tree op0, op1;
4467  rtx value, result;
4468  optab binop;
4469  gimple srcstmt;
4470  enum tree_code code;
4471
4472  if (mode1 != VOIDmode
4473      || bitsize >= BITS_PER_WORD
4474      || str_bitsize > BITS_PER_WORD
4475      || TREE_SIDE_EFFECTS (to)
4476      || TREE_THIS_VOLATILE (to))
4477    return false;
4478
4479  STRIP_NOPS (src);
4480  if (TREE_CODE (src) != SSA_NAME)
4481    return false;
4482  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4483    return false;
4484
4485  srcstmt = get_gimple_for_ssa_name (src);
4486  if (!srcstmt
4487      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4488    return false;
4489
4490  code = gimple_assign_rhs_code (srcstmt);
4491
4492  op0 = gimple_assign_rhs1 (srcstmt);
4493
4494  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4495     to find its initialization.  Hopefully the initialization will
4496     be from a bitfield load.  */
4497  if (TREE_CODE (op0) == SSA_NAME)
4498    {
4499      gimple op0stmt = get_gimple_for_ssa_name (op0);
4500
4501      /* We want to eventually have OP0 be the same as TO, which
4502	 should be a bitfield.  */
4503      if (!op0stmt
4504	  || !is_gimple_assign (op0stmt)
4505	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4506	return false;
4507      op0 = gimple_assign_rhs1 (op0stmt);
4508    }
4509
4510  op1 = gimple_assign_rhs2 (srcstmt);
4511
4512  if (!operand_equal_p (to, op0, 0))
4513    return false;
4514
4515  if (MEM_P (str_rtx))
4516    {
4517      unsigned HOST_WIDE_INT offset1;
4518
4519      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4520	str_mode = word_mode;
4521      str_mode = get_best_mode (bitsize, bitpos,
4522				bitregion_start, bitregion_end,
4523				MEM_ALIGN (str_rtx), str_mode, 0);
4524      if (str_mode == VOIDmode)
4525	return false;
4526      str_bitsize = GET_MODE_BITSIZE (str_mode);
4527
4528      offset1 = bitpos;
4529      bitpos %= str_bitsize;
4530      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4531      str_rtx = adjust_address (str_rtx, str_mode, offset1);
4532    }
4533  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4534    return false;
4535
4536  /* If the bit field covers the whole REG/MEM, store_field
4537     will likely generate better code.  */
4538  if (bitsize >= str_bitsize)
4539    return false;
4540
4541  /* We can't handle fields split across multiple entities.  */
4542  if (bitpos + bitsize > str_bitsize)
4543    return false;
4544
4545  if (BYTES_BIG_ENDIAN)
4546    bitpos = str_bitsize - bitpos - bitsize;
4547
4548  switch (code)
4549    {
4550    case PLUS_EXPR:
4551    case MINUS_EXPR:
4552      /* For now, just optimize the case of the topmost bitfield
4553	 where we don't need to do any masking and also
4554	 1 bit bitfields where xor can be used.
4555	 We might win by one instruction for the other bitfields
4556	 too if insv/extv instructions aren't used, so that
4557	 can be added later.  */
4558      if (bitpos + bitsize != str_bitsize
4559	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4560	break;
4561
4562      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4563      value = convert_modes (str_mode,
4564			     TYPE_MODE (TREE_TYPE (op1)), value,
4565			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4566
4567      /* We may be accessing data outside the field, which means
4568	 we can alias adjacent data.  */
4569      if (MEM_P (str_rtx))
4570	{
4571	  str_rtx = shallow_copy_rtx (str_rtx);
4572	  set_mem_alias_set (str_rtx, 0);
4573	  set_mem_expr (str_rtx, 0);
4574	}
4575
4576      binop = code == PLUS_EXPR ? add_optab : sub_optab;
4577      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4578	{
4579	  value = expand_and (str_mode, value, const1_rtx, NULL);
4580	  binop = xor_optab;
4581	}
4582      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4583      result = expand_binop (str_mode, binop, str_rtx,
4584			     value, str_rtx, 1, OPTAB_WIDEN);
4585      if (result != str_rtx)
4586	emit_move_insn (str_rtx, result);
4587      return true;
4588
4589    case BIT_IOR_EXPR:
4590    case BIT_XOR_EXPR:
4591      if (TREE_CODE (op1) != INTEGER_CST)
4592	break;
4593      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4594      value = convert_modes (str_mode,
4595			     TYPE_MODE (TREE_TYPE (op1)), value,
4596			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4597
4598      /* We may be accessing data outside the field, which means
4599	 we can alias adjacent data.  */
4600      if (MEM_P (str_rtx))
4601	{
4602	  str_rtx = shallow_copy_rtx (str_rtx);
4603	  set_mem_alias_set (str_rtx, 0);
4604	  set_mem_expr (str_rtx, 0);
4605	}
4606
4607      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4608      if (bitpos + bitsize != str_bitsize)
4609	{
4610	  rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4611				   str_mode);
4612	  value = expand_and (str_mode, value, mask, NULL_RTX);
4613	}
4614      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4615      result = expand_binop (str_mode, binop, str_rtx,
4616			     value, str_rtx, 1, OPTAB_WIDEN);
4617      if (result != str_rtx)
4618	emit_move_insn (str_rtx, result);
4619      return true;
4620
4621    default:
4622      break;
4623    }
4624
4625  return false;
4626}
4627
4628/* In the C++ memory model, consecutive bit fields in a structure are
4629   considered one memory location.
4630
4631   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4632   returns the bit range of consecutive bits in which this COMPONENT_REF
4633   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
4634   and *OFFSET may be adjusted in the process.
4635
4636   If the access does not need to be restricted, 0 is returned in both
4637   *BITSTART and *BITEND.  */
4638
4639static void
4640get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4641	       unsigned HOST_WIDE_INT *bitend,
4642	       tree exp,
4643	       HOST_WIDE_INT *bitpos,
4644	       tree *offset)
4645{
4646  HOST_WIDE_INT bitoffset;
4647  tree field, repr;
4648
4649  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4650
4651  field = TREE_OPERAND (exp, 1);
4652  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4653  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4654     need to limit the range we can access.  */
4655  if (!repr)
4656    {
4657      *bitstart = *bitend = 0;
4658      return;
4659    }
4660
4661  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4662     part of a larger bit field, then the representative does not serve any
4663     useful purpose.  This can occur in Ada.  */
4664  if (handled_component_p (TREE_OPERAND (exp, 0)))
4665    {
4666      machine_mode rmode;
4667      HOST_WIDE_INT rbitsize, rbitpos;
4668      tree roffset;
4669      int unsignedp;
4670      int volatilep = 0;
4671      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4672			   &roffset, &rmode, &unsignedp, &volatilep, false);
4673      if ((rbitpos % BITS_PER_UNIT) != 0)
4674	{
4675	  *bitstart = *bitend = 0;
4676	  return;
4677	}
4678    }
4679
4680  /* Compute the adjustment to bitpos from the offset of the field
4681     relative to the representative.  DECL_FIELD_OFFSET of field and
4682     repr are the same by construction if they are not constants,
4683     see finish_bitfield_layout.  */
4684  if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4685      && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4686    bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4687		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4688  else
4689    bitoffset = 0;
4690  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4691		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4692
4693  /* If the adjustment is larger than bitpos, we would have a negative bit
4694     position for the lower bound and this may wreak havoc later.  Adjust
4695     offset and bitpos to make the lower bound non-negative in that case.  */
4696  if (bitoffset > *bitpos)
4697    {
4698      HOST_WIDE_INT adjust = bitoffset - *bitpos;
4699      gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4700
4701      *bitpos += adjust;
4702      if (*offset == NULL_TREE)
4703	*offset = size_int (-adjust / BITS_PER_UNIT);
4704      else
4705	*offset
4706	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4707      *bitstart = 0;
4708    }
4709  else
4710    *bitstart = *bitpos - bitoffset;
4711
4712  *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4713}
4714
4715/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4716   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
4717   DECL_RTL was not set yet, return NORTL.  */
4718
4719static inline bool
4720addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4721{
4722  if (TREE_CODE (addr) != ADDR_EXPR)
4723    return false;
4724
4725  tree base = TREE_OPERAND (addr, 0);
4726
4727  if (!DECL_P (base)
4728      || TREE_ADDRESSABLE (base)
4729      || DECL_MODE (base) == BLKmode)
4730    return false;
4731
4732  if (!DECL_RTL_SET_P (base))
4733    return nortl;
4734
4735  return (!MEM_P (DECL_RTL (base)));
4736}
4737
4738/* Returns true if the MEM_REF REF refers to an object that does not
4739   reside in memory and has non-BLKmode.  */
4740
4741static inline bool
4742mem_ref_refers_to_non_mem_p (tree ref)
4743{
4744  tree base = TREE_OPERAND (ref, 0);
4745  return addr_expr_of_non_mem_decl_p_1 (base, false);
4746}
4747
4748/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4749   is true, try generating a nontemporal store.  */
4750
4751void
4752expand_assignment (tree to, tree from, bool nontemporal)
4753{
4754  rtx to_rtx = 0;
4755  rtx result;
4756  machine_mode mode;
4757  unsigned int align;
4758  enum insn_code icode;
4759
4760  /* Don't crash if the lhs of the assignment was erroneous.  */
4761  if (TREE_CODE (to) == ERROR_MARK)
4762    {
4763      expand_normal (from);
4764      return;
4765    }
4766
4767  /* Optimize away no-op moves without side-effects.  */
4768  if (operand_equal_p (to, from, 0))
4769    return;
4770
4771  /* Handle misaligned stores.  */
4772  mode = TYPE_MODE (TREE_TYPE (to));
4773  if ((TREE_CODE (to) == MEM_REF
4774       || TREE_CODE (to) == TARGET_MEM_REF)
4775      && mode != BLKmode
4776      && !mem_ref_refers_to_non_mem_p (to)
4777      && ((align = get_object_alignment (to))
4778	  < GET_MODE_ALIGNMENT (mode))
4779      && (((icode = optab_handler (movmisalign_optab, mode))
4780	   != CODE_FOR_nothing)
4781	  || SLOW_UNALIGNED_ACCESS (mode, align)))
4782    {
4783      rtx reg, mem;
4784
4785      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4786      reg = force_not_mem (reg);
4787      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4788
4789      if (icode != CODE_FOR_nothing)
4790	{
4791	  struct expand_operand ops[2];
4792
4793	  create_fixed_operand (&ops[0], mem);
4794	  create_input_operand (&ops[1], reg, mode);
4795	  /* The movmisalign<mode> pattern cannot fail, else the assignment
4796	     would silently be omitted.  */
4797	  expand_insn (icode, 2, ops);
4798	}
4799      else
4800	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4801      return;
4802    }
4803
4804  /* Assignment of a structure component needs special treatment
4805     if the structure component's rtx is not simply a MEM.
4806     Assignment of an array element at a constant index, and assignment of
4807     an array element in an unaligned packed structure field, has the same
4808     problem.  Same for (partially) storing into a non-memory object.  */
4809  if (handled_component_p (to)
4810      || (TREE_CODE (to) == MEM_REF
4811	  && mem_ref_refers_to_non_mem_p (to))
4812      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4813    {
4814      machine_mode mode1;
4815      HOST_WIDE_INT bitsize, bitpos;
4816      unsigned HOST_WIDE_INT bitregion_start = 0;
4817      unsigned HOST_WIDE_INT bitregion_end = 0;
4818      tree offset;
4819      int unsignedp;
4820      int volatilep = 0;
4821      tree tem;
4822
4823      push_temp_slots ();
4824      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4825				 &unsignedp, &volatilep, true);
4826
4827      /* Make sure bitpos is not negative, it can wreak havoc later.  */
4828      if (bitpos < 0)
4829	{
4830	  gcc_assert (offset == NULL_TREE);
4831	  offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4832					? 3 : exact_log2 (BITS_PER_UNIT)));
4833	  bitpos &= BITS_PER_UNIT - 1;
4834	}
4835
4836      if (TREE_CODE (to) == COMPONENT_REF
4837	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4838	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4839      /* The C++ memory model naturally applies to byte-aligned fields.
4840	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4841	 BITSIZE are not byte-aligned, there is no need to limit the range
4842	 we can access.  This can occur with packed structures in Ada.  */
4843      else if (bitsize > 0
4844	       && bitsize % BITS_PER_UNIT == 0
4845	       && bitpos % BITS_PER_UNIT == 0)
4846	{
4847	  bitregion_start = bitpos;
4848	  bitregion_end = bitpos + bitsize - 1;
4849	}
4850
4851      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4852
4853      /* If the field has a mode, we want to access it in the
4854	 field's mode, not the computed mode.
4855	 If a MEM has VOIDmode (external with incomplete type),
4856	 use BLKmode for it instead.  */
4857      if (MEM_P (to_rtx))
4858	{
4859	  if (mode1 != VOIDmode)
4860	    to_rtx = adjust_address (to_rtx, mode1, 0);
4861	  else if (GET_MODE (to_rtx) == VOIDmode)
4862	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
4863	}
4864
4865      if (offset != 0)
4866	{
4867	  machine_mode address_mode;
4868	  rtx offset_rtx;
4869
4870	  if (!MEM_P (to_rtx))
4871	    {
4872	      /* We can get constant negative offsets into arrays with broken
4873		 user code.  Translate this to a trap instead of ICEing.  */
4874	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4875	      expand_builtin_trap ();
4876	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4877	    }
4878
4879	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4880	  address_mode = get_address_mode (to_rtx);
4881	  if (GET_MODE (offset_rtx) != address_mode)
4882	    offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4883
4884	  /* If we have an expression in OFFSET_RTX and a non-zero
4885	     byte offset in BITPOS, adding the byte offset before the
4886	     OFFSET_RTX results in better intermediate code, which makes
4887	     later rtl optimization passes perform better.
4888
4889	     We prefer intermediate code like this:
4890
4891	     r124:DI=r123:DI+0x18
4892	     [r124:DI]=r121:DI
4893
4894	     ... instead of ...
4895
4896	     r124:DI=r123:DI+0x10
4897	     [r124:DI+0x8]=r121:DI
4898
4899	     This is only done for aligned data values, as these can
4900	     be expected to result in single move instructions.  */
4901	  if (mode1 != VOIDmode
4902	      && bitpos != 0
4903	      && bitsize > 0
4904	      && (bitpos % bitsize) == 0
4905	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4906	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4907	    {
4908	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4909	      bitregion_start = 0;
4910	      if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4911		bitregion_end -= bitpos;
4912	      bitpos = 0;
4913	    }
4914
4915	  to_rtx = offset_address (to_rtx, offset_rtx,
4916				   highest_pow2_factor_for_target (to,
4917				   				   offset));
4918	}
4919
4920      /* No action is needed if the target is not a memory and the field
4921	 lies completely outside that target.  This can occur if the source
4922	 code contains an out-of-bounds access to a small array.  */
4923      if (!MEM_P (to_rtx)
4924	  && GET_MODE (to_rtx) != BLKmode
4925	  && (unsigned HOST_WIDE_INT) bitpos
4926	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4927	{
4928	  expand_normal (from);
4929	  result = NULL;
4930	}
4931      /* Handle expand_expr of a complex value returning a CONCAT.  */
4932      else if (GET_CODE (to_rtx) == CONCAT)
4933	{
4934	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4935	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4936	      && bitpos == 0
4937	      && bitsize == mode_bitsize)
4938	    result = store_expr (from, to_rtx, false, nontemporal);
4939	  else if (bitsize == mode_bitsize / 2
4940		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
4941	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4942				 nontemporal);
4943	  else if (bitpos + bitsize <= mode_bitsize / 2)
4944	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4945				  bitregion_start, bitregion_end,
4946				  mode1, from,
4947				  get_alias_set (to), nontemporal);
4948	  else if (bitpos >= mode_bitsize / 2)
4949	    result = store_field (XEXP (to_rtx, 1), bitsize,
4950				  bitpos - mode_bitsize / 2,
4951				  bitregion_start, bitregion_end,
4952				  mode1, from,
4953				  get_alias_set (to), nontemporal);
4954	  else if (bitpos == 0 && bitsize == mode_bitsize)
4955	    {
4956	      rtx from_rtx;
4957	      result = expand_normal (from);
4958	      from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4959					      TYPE_MODE (TREE_TYPE (from)), 0);
4960	      emit_move_insn (XEXP (to_rtx, 0),
4961			      read_complex_part (from_rtx, false));
4962	      emit_move_insn (XEXP (to_rtx, 1),
4963			      read_complex_part (from_rtx, true));
4964	    }
4965	  else
4966	    {
4967	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4968					    GET_MODE_SIZE (GET_MODE (to_rtx)));
4969	      write_complex_part (temp, XEXP (to_rtx, 0), false);
4970	      write_complex_part (temp, XEXP (to_rtx, 1), true);
4971	      result = store_field (temp, bitsize, bitpos,
4972				    bitregion_start, bitregion_end,
4973				    mode1, from,
4974				    get_alias_set (to), nontemporal);
4975	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4976	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4977	    }
4978	}
4979      else
4980	{
4981	  if (MEM_P (to_rtx))
4982	    {
4983	      /* If the field is at offset zero, we could have been given the
4984		 DECL_RTX of the parent struct.  Don't munge it.  */
4985	      to_rtx = shallow_copy_rtx (to_rtx);
4986	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4987	      if (volatilep)
4988		MEM_VOLATILE_P (to_rtx) = 1;
4989	    }
4990
4991	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
4992					       bitregion_start, bitregion_end,
4993					       mode1,
4994					       to_rtx, to, from))
4995	    result = NULL;
4996	  else
4997	    result = store_field (to_rtx, bitsize, bitpos,
4998				  bitregion_start, bitregion_end,
4999				  mode1, from,
5000				  get_alias_set (to), nontemporal);
5001	}
5002
5003      if (result)
5004	preserve_temp_slots (result);
5005      pop_temp_slots ();
5006      return;
5007    }
5008
5009  /* If the rhs is a function call and its value is not an aggregate,
5010     call the function before we start to compute the lhs.
5011     This is needed for correct code for cases such as
5012     val = setjmp (buf) on machines where reference to val
5013     requires loading up part of an address in a separate insn.
5014
5015     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5016     since it might be a promoted variable where the zero- or sign- extension
5017     needs to be done.  Handling this in the normal way is safe because no
5018     computation is done before the call.  The same is true for SSA names.  */
5019  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5020      && COMPLETE_TYPE_P (TREE_TYPE (from))
5021      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5022      && ! (((TREE_CODE (to) == VAR_DECL
5023	      || TREE_CODE (to) == PARM_DECL
5024	      || TREE_CODE (to) == RESULT_DECL)
5025	     && REG_P (DECL_RTL (to)))
5026	    || TREE_CODE (to) == SSA_NAME))
5027    {
5028      rtx value;
5029      rtx bounds;
5030
5031      push_temp_slots ();
5032      value = expand_normal (from);
5033
5034      /* Split value and bounds to store them separately.  */
5035      chkp_split_slot (value, &value, &bounds);
5036
5037      if (to_rtx == 0)
5038	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5039
5040      /* Handle calls that return values in multiple non-contiguous locations.
5041	 The Irix 6 ABI has examples of this.  */
5042      if (GET_CODE (to_rtx) == PARALLEL)
5043	{
5044	  if (GET_CODE (value) == PARALLEL)
5045	    emit_group_move (to_rtx, value);
5046	  else
5047	    emit_group_load (to_rtx, value, TREE_TYPE (from),
5048			     int_size_in_bytes (TREE_TYPE (from)));
5049	}
5050      else if (GET_CODE (value) == PARALLEL)
5051	emit_group_store (to_rtx, value, TREE_TYPE (from),
5052			  int_size_in_bytes (TREE_TYPE (from)));
5053      else if (GET_MODE (to_rtx) == BLKmode)
5054	{
5055	  /* Handle calls that return BLKmode values in registers.  */
5056	  if (REG_P (value))
5057	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5058	  else
5059	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5060	}
5061      else
5062	{
5063	  if (POINTER_TYPE_P (TREE_TYPE (to)))
5064	    value = convert_memory_address_addr_space
5065		      (GET_MODE (to_rtx), value,
5066		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5067
5068	  emit_move_insn (to_rtx, value);
5069	}
5070
5071      /* Store bounds if required.  */
5072      if (bounds
5073	  && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5074	{
5075	  gcc_assert (MEM_P (to_rtx));
5076	  chkp_emit_bounds_store (bounds, value, to_rtx);
5077	}
5078
5079      preserve_temp_slots (to_rtx);
5080      pop_temp_slots ();
5081      return;
5082    }
5083
5084  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
5085  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5086
5087  /* Don't move directly into a return register.  */
5088  if (TREE_CODE (to) == RESULT_DECL
5089      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5090    {
5091      rtx temp;
5092
5093      push_temp_slots ();
5094
5095      /* If the source is itself a return value, it still is in a pseudo at
5096	 this point so we can move it back to the return register directly.  */
5097      if (REG_P (to_rtx)
5098	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5099	  && TREE_CODE (from) != CALL_EXPR)
5100	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5101      else
5102	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5103
5104      /* Handle calls that return values in multiple non-contiguous locations.
5105	 The Irix 6 ABI has examples of this.  */
5106      if (GET_CODE (to_rtx) == PARALLEL)
5107	{
5108	  if (GET_CODE (temp) == PARALLEL)
5109	    emit_group_move (to_rtx, temp);
5110	  else
5111	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
5112			     int_size_in_bytes (TREE_TYPE (from)));
5113	}
5114      else if (temp)
5115	emit_move_insn (to_rtx, temp);
5116
5117      preserve_temp_slots (to_rtx);
5118      pop_temp_slots ();
5119      return;
5120    }
5121
5122  /* In case we are returning the contents of an object which overlaps
5123     the place the value is being stored, use a safe function when copying
5124     a value through a pointer into a structure value return block.  */
5125  if (TREE_CODE (to) == RESULT_DECL
5126      && TREE_CODE (from) == INDIRECT_REF
5127      && ADDR_SPACE_GENERIC_P
5128	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5129      && refs_may_alias_p (to, from)
5130      && cfun->returns_struct
5131      && !cfun->returns_pcc_struct)
5132    {
5133      rtx from_rtx, size;
5134
5135      push_temp_slots ();
5136      size = expr_size (from);
5137      from_rtx = expand_normal (from);
5138
5139      emit_library_call (memmove_libfunc, LCT_NORMAL,
5140			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5141			 XEXP (from_rtx, 0), Pmode,
5142			 convert_to_mode (TYPE_MODE (sizetype),
5143					  size, TYPE_UNSIGNED (sizetype)),
5144			 TYPE_MODE (sizetype));
5145
5146      preserve_temp_slots (to_rtx);
5147      pop_temp_slots ();
5148      return;
5149    }
5150
5151  /* Compute FROM and store the value in the rtx we got.  */
5152
5153  push_temp_slots ();
5154  result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5155  preserve_temp_slots (result);
5156  pop_temp_slots ();
5157  return;
5158}
5159
5160/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
5161   succeeded, false otherwise.  */
5162
5163bool
5164emit_storent_insn (rtx to, rtx from)
5165{
5166  struct expand_operand ops[2];
5167  machine_mode mode = GET_MODE (to);
5168  enum insn_code code = optab_handler (storent_optab, mode);
5169
5170  if (code == CODE_FOR_nothing)
5171    return false;
5172
5173  create_fixed_operand (&ops[0], to);
5174  create_input_operand (&ops[1], from, mode);
5175  return maybe_expand_insn (code, 2, ops);
5176}
5177
5178/* Generate code for computing expression EXP,
5179   and storing the value into TARGET.
5180
5181   If the mode is BLKmode then we may return TARGET itself.
5182   It turns out that in BLKmode it doesn't cause a problem.
5183   because C has no operators that could combine two different
5184   assignments into the same BLKmode object with different values
5185   with no sequence point.  Will other languages need this to
5186   be more thorough?
5187
5188   If CALL_PARAM_P is nonzero, this is a store into a call param on the
5189   stack, and block moves may need to be treated specially.
5190
5191   If NONTEMPORAL is true, try using a nontemporal store instruction.
5192
5193   If BTARGET is not NULL then computed bounds of EXP are
5194   associated with BTARGET.  */
5195
5196rtx
5197store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5198			bool nontemporal, tree btarget)
5199{
5200  rtx temp;
5201  rtx alt_rtl = NULL_RTX;
5202  location_t loc = curr_insn_location ();
5203
5204  if (VOID_TYPE_P (TREE_TYPE (exp)))
5205    {
5206      /* C++ can generate ?: expressions with a throw expression in one
5207	 branch and an rvalue in the other. Here, we resolve attempts to
5208	 store the throw expression's nonexistent result.  */
5209      gcc_assert (!call_param_p);
5210      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5211      return NULL_RTX;
5212    }
5213  if (TREE_CODE (exp) == COMPOUND_EXPR)
5214    {
5215      /* Perform first part of compound expression, then assign from second
5216	 part.  */
5217      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5218		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5219      return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5220				     call_param_p, nontemporal, btarget);
5221    }
5222  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5223    {
5224      /* For conditional expression, get safe form of the target.  Then
5225	 test the condition, doing the appropriate assignment on either
5226	 side.  This avoids the creation of unnecessary temporaries.
5227	 For non-BLKmode, it is more efficient not to do this.  */
5228
5229      rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5230
5231      do_pending_stack_adjust ();
5232      NO_DEFER_POP;
5233      jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5234      store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5235			      nontemporal, btarget);
5236      emit_jump_insn (gen_jump (lab2));
5237      emit_barrier ();
5238      emit_label (lab1);
5239      store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5240			      nontemporal, btarget);
5241      emit_label (lab2);
5242      OK_DEFER_POP;
5243
5244      return NULL_RTX;
5245    }
5246  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5247    /* If this is a scalar in a register that is stored in a wider mode
5248       than the declared mode, compute the result into its declared mode
5249       and then convert to the wider mode.  Our value is the computed
5250       expression.  */
5251    {
5252      rtx inner_target = 0;
5253
5254      /* We can do the conversion inside EXP, which will often result
5255	 in some optimizations.  Do the conversion in two steps: first
5256	 change the signedness, if needed, then the extend.  But don't
5257	 do this if the type of EXP is a subtype of something else
5258	 since then the conversion might involve more than just
5259	 converting modes.  */
5260      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5261	  && TREE_TYPE (TREE_TYPE (exp)) == 0
5262	  && GET_MODE_PRECISION (GET_MODE (target))
5263	     == TYPE_PRECISION (TREE_TYPE (exp)))
5264	{
5265	  if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5266					  TYPE_UNSIGNED (TREE_TYPE (exp))))
5267	    {
5268	      /* Some types, e.g. Fortran's logical*4, won't have a signed
5269		 version, so use the mode instead.  */
5270	      tree ntype
5271		= (signed_or_unsigned_type_for
5272		   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5273	      if (ntype == NULL)
5274		ntype = lang_hooks.types.type_for_mode
5275		  (TYPE_MODE (TREE_TYPE (exp)),
5276		   SUBREG_PROMOTED_SIGN (target));
5277
5278	      exp = fold_convert_loc (loc, ntype, exp);
5279	    }
5280
5281	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5282				  (GET_MODE (SUBREG_REG (target)),
5283				   SUBREG_PROMOTED_SIGN (target)),
5284				  exp);
5285
5286	  inner_target = SUBREG_REG (target);
5287	}
5288
5289      temp = expand_expr (exp, inner_target, VOIDmode,
5290			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5291
5292      /* Handle bounds returned by call.  */
5293      if (TREE_CODE (exp) == CALL_EXPR)
5294	{
5295	  rtx bounds;
5296	  chkp_split_slot (temp, &temp, &bounds);
5297	  if (bounds && btarget)
5298	    {
5299	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5300	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5301	      chkp_set_rtl_bounds (btarget, tmp);
5302	    }
5303	}
5304
5305      /* If TEMP is a VOIDmode constant, use convert_modes to make
5306	 sure that we properly convert it.  */
5307      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5308	{
5309	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5310				temp, SUBREG_PROMOTED_SIGN (target));
5311	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5312			        GET_MODE (target), temp,
5313				SUBREG_PROMOTED_SIGN (target));
5314	}
5315
5316      convert_move (SUBREG_REG (target), temp,
5317		    SUBREG_PROMOTED_SIGN (target));
5318
5319      return NULL_RTX;
5320    }
5321  else if ((TREE_CODE (exp) == STRING_CST
5322	    || (TREE_CODE (exp) == MEM_REF
5323		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5324		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5325		   == STRING_CST
5326		&& integer_zerop (TREE_OPERAND (exp, 1))))
5327	   && !nontemporal && !call_param_p
5328	   && MEM_P (target))
5329    {
5330      /* Optimize initialization of an array with a STRING_CST.  */
5331      HOST_WIDE_INT exp_len, str_copy_len;
5332      rtx dest_mem;
5333      tree str = TREE_CODE (exp) == STRING_CST
5334		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5335
5336      exp_len = int_expr_size (exp);
5337      if (exp_len <= 0)
5338	goto normal_expr;
5339
5340      if (TREE_STRING_LENGTH (str) <= 0)
5341	goto normal_expr;
5342
5343      str_copy_len = strlen (TREE_STRING_POINTER (str));
5344      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5345	goto normal_expr;
5346
5347      str_copy_len = TREE_STRING_LENGTH (str);
5348      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5349	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5350	{
5351	  str_copy_len += STORE_MAX_PIECES - 1;
5352	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
5353	}
5354      str_copy_len = MIN (str_copy_len, exp_len);
5355      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5356				CONST_CAST (char *, TREE_STRING_POINTER (str)),
5357				MEM_ALIGN (target), false))
5358	goto normal_expr;
5359
5360      dest_mem = target;
5361
5362      dest_mem = store_by_pieces (dest_mem,
5363				  str_copy_len, builtin_strncpy_read_str,
5364				  CONST_CAST (char *,
5365					      TREE_STRING_POINTER (str)),
5366				  MEM_ALIGN (target), false,
5367				  exp_len > str_copy_len ? 1 : 0);
5368      if (exp_len > str_copy_len)
5369	clear_storage (adjust_address (dest_mem, BLKmode, 0),
5370		       GEN_INT (exp_len - str_copy_len),
5371		       BLOCK_OP_NORMAL);
5372      return NULL_RTX;
5373    }
5374  else
5375    {
5376      rtx tmp_target;
5377
5378  normal_expr:
5379      /* If we want to use a nontemporal store, force the value to
5380	 register first.  */
5381      tmp_target = nontemporal ? NULL_RTX : target;
5382      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5383			       (call_param_p
5384				? EXPAND_STACK_PARM : EXPAND_NORMAL),
5385			       &alt_rtl, false);
5386
5387      /* Handle bounds returned by call.  */
5388      if (TREE_CODE (exp) == CALL_EXPR)
5389	{
5390	  rtx bounds;
5391	  chkp_split_slot (temp, &temp, &bounds);
5392	  if (bounds && btarget)
5393	    {
5394	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5395	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5396	      chkp_set_rtl_bounds (btarget, tmp);
5397	    }
5398	}
5399    }
5400
5401  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5402     the same as that of TARGET, adjust the constant.  This is needed, for
5403     example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5404     only a word-sized value.  */
5405  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5406      && TREE_CODE (exp) != ERROR_MARK
5407      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5408    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5409			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5410
5411  /* If value was not generated in the target, store it there.
5412     Convert the value to TARGET's type first if necessary and emit the
5413     pending incrementations that have been queued when expanding EXP.
5414     Note that we cannot emit the whole queue blindly because this will
5415     effectively disable the POST_INC optimization later.
5416
5417     If TEMP and TARGET compare equal according to rtx_equal_p, but
5418     one or both of them are volatile memory refs, we have to distinguish
5419     two cases:
5420     - expand_expr has used TARGET.  In this case, we must not generate
5421       another copy.  This can be detected by TARGET being equal according
5422       to == .
5423     - expand_expr has not used TARGET - that means that the source just
5424       happens to have the same RTX form.  Since temp will have been created
5425       by expand_expr, it will compare unequal according to == .
5426       We must generate a copy in this case, to reach the correct number
5427       of volatile memory references.  */
5428
5429  if ((! rtx_equal_p (temp, target)
5430       || (temp != target && (side_effects_p (temp)
5431			      || side_effects_p (target))))
5432      && TREE_CODE (exp) != ERROR_MARK
5433      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5434	 but TARGET is not valid memory reference, TEMP will differ
5435	 from TARGET although it is really the same location.  */
5436      && !(alt_rtl
5437	   && rtx_equal_p (alt_rtl, target)
5438	   && !side_effects_p (alt_rtl)
5439	   && !side_effects_p (target))
5440      /* If there's nothing to copy, don't bother.  Don't call
5441	 expr_size unless necessary, because some front-ends (C++)
5442	 expr_size-hook must not be given objects that are not
5443	 supposed to be bit-copied or bit-initialized.  */
5444      && expr_size (exp) != const0_rtx)
5445    {
5446      if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5447	{
5448	  if (GET_MODE (target) == BLKmode)
5449	    {
5450	      /* Handle calls that return BLKmode values in registers.  */
5451	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5452		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5453	      else
5454		store_bit_field (target,
5455				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5456				 0, 0, 0, GET_MODE (temp), temp);
5457	    }
5458	  else
5459	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5460	}
5461
5462      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5463	{
5464	  /* Handle copying a string constant into an array.  The string
5465	     constant may be shorter than the array.  So copy just the string's
5466	     actual length, and clear the rest.  First get the size of the data
5467	     type of the string, which is actually the size of the target.  */
5468	  rtx size = expr_size (exp);
5469
5470	  if (CONST_INT_P (size)
5471	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
5472	    emit_block_move (target, temp, size,
5473			     (call_param_p
5474			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5475	  else
5476	    {
5477	      machine_mode pointer_mode
5478		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5479	      machine_mode address_mode = get_address_mode (target);
5480
5481	      /* Compute the size of the data to copy from the string.  */
5482	      tree copy_size
5483		= size_binop_loc (loc, MIN_EXPR,
5484				  make_tree (sizetype, size),
5485				  size_int (TREE_STRING_LENGTH (exp)));
5486	      rtx copy_size_rtx
5487		= expand_expr (copy_size, NULL_RTX, VOIDmode,
5488			       (call_param_p
5489				? EXPAND_STACK_PARM : EXPAND_NORMAL));
5490	      rtx_code_label *label = 0;
5491
5492	      /* Copy that much.  */
5493	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5494					       TYPE_UNSIGNED (sizetype));
5495	      emit_block_move (target, temp, copy_size_rtx,
5496			       (call_param_p
5497				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5498
5499	      /* Figure out how much is left in TARGET that we have to clear.
5500		 Do all calculations in pointer_mode.  */
5501	      if (CONST_INT_P (copy_size_rtx))
5502		{
5503		  size = plus_constant (address_mode, size,
5504					-INTVAL (copy_size_rtx));
5505		  target = adjust_address (target, BLKmode,
5506					   INTVAL (copy_size_rtx));
5507		}
5508	      else
5509		{
5510		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5511				       copy_size_rtx, NULL_RTX, 0,
5512				       OPTAB_LIB_WIDEN);
5513
5514		  if (GET_MODE (copy_size_rtx) != address_mode)
5515		    copy_size_rtx = convert_to_mode (address_mode,
5516						     copy_size_rtx,
5517						     TYPE_UNSIGNED (sizetype));
5518
5519		  target = offset_address (target, copy_size_rtx,
5520					   highest_pow2_factor (copy_size));
5521		  label = gen_label_rtx ();
5522		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5523					   GET_MODE (size), 0, label);
5524		}
5525
5526	      if (size != const0_rtx)
5527		clear_storage (target, size, BLOCK_OP_NORMAL);
5528
5529	      if (label)
5530		emit_label (label);
5531	    }
5532	}
5533      /* Handle calls that return values in multiple non-contiguous locations.
5534	 The Irix 6 ABI has examples of this.  */
5535      else if (GET_CODE (target) == PARALLEL)
5536	{
5537	  if (GET_CODE (temp) == PARALLEL)
5538	    emit_group_move (target, temp);
5539	  else
5540	    emit_group_load (target, temp, TREE_TYPE (exp),
5541			     int_size_in_bytes (TREE_TYPE (exp)));
5542	}
5543      else if (GET_CODE (temp) == PARALLEL)
5544	emit_group_store (target, temp, TREE_TYPE (exp),
5545			  int_size_in_bytes (TREE_TYPE (exp)));
5546      else if (GET_MODE (temp) == BLKmode)
5547	emit_block_move (target, temp, expr_size (exp),
5548			 (call_param_p
5549			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5550      /* If we emit a nontemporal store, there is nothing else to do.  */
5551      else if (nontemporal && emit_storent_insn (target, temp))
5552	;
5553      else
5554	{
5555	  temp = force_operand (temp, target);
5556	  if (temp != target)
5557	    emit_move_insn (target, temp);
5558	}
5559    }
5560
5561  return NULL_RTX;
5562}
5563
5564/* Same as store_expr_with_bounds but ignoring bounds of EXP.  */
5565rtx
5566store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5567{
5568  return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5569}
5570
5571/* Return true if field F of structure TYPE is a flexible array.  */
5572
5573static bool
5574flexible_array_member_p (const_tree f, const_tree type)
5575{
5576  const_tree tf;
5577
5578  tf = TREE_TYPE (f);
5579  return (DECL_CHAIN (f) == NULL
5580	  && TREE_CODE (tf) == ARRAY_TYPE
5581	  && TYPE_DOMAIN (tf)
5582	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5583	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5584	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5585	  && int_size_in_bytes (type) >= 0);
5586}
5587
5588/* If FOR_CTOR_P, return the number of top-level elements that a constructor
5589   must have in order for it to completely initialize a value of type TYPE.
5590   Return -1 if the number isn't known.
5591
5592   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
5593
5594static HOST_WIDE_INT
5595count_type_elements (const_tree type, bool for_ctor_p)
5596{
5597  switch (TREE_CODE (type))
5598    {
5599    case ARRAY_TYPE:
5600      {
5601	tree nelts;
5602
5603	nelts = array_type_nelts (type);
5604	if (nelts && tree_fits_uhwi_p (nelts))
5605	  {
5606	    unsigned HOST_WIDE_INT n;
5607
5608	    n = tree_to_uhwi (nelts) + 1;
5609	    if (n == 0 || for_ctor_p)
5610	      return n;
5611	    else
5612	      return n * count_type_elements (TREE_TYPE (type), false);
5613	  }
5614	return for_ctor_p ? -1 : 1;
5615      }
5616
5617    case RECORD_TYPE:
5618      {
5619	unsigned HOST_WIDE_INT n;
5620	tree f;
5621
5622	n = 0;
5623	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5624	  if (TREE_CODE (f) == FIELD_DECL)
5625	    {
5626	      if (!for_ctor_p)
5627		n += count_type_elements (TREE_TYPE (f), false);
5628	      else if (!flexible_array_member_p (f, type))
5629		/* Don't count flexible arrays, which are not supposed
5630		   to be initialized.  */
5631		n += 1;
5632	    }
5633
5634	return n;
5635      }
5636
5637    case UNION_TYPE:
5638    case QUAL_UNION_TYPE:
5639      {
5640	tree f;
5641	HOST_WIDE_INT n, m;
5642
5643	gcc_assert (!for_ctor_p);
5644	/* Estimate the number of scalars in each field and pick the
5645	   maximum.  Other estimates would do instead; the idea is simply
5646	   to make sure that the estimate is not sensitive to the ordering
5647	   of the fields.  */
5648	n = 1;
5649	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5650	  if (TREE_CODE (f) == FIELD_DECL)
5651	    {
5652	      m = count_type_elements (TREE_TYPE (f), false);
5653	      /* If the field doesn't span the whole union, add an extra
5654		 scalar for the rest.  */
5655	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5656				    TYPE_SIZE (type)) != 1)
5657		m++;
5658	      if (n < m)
5659		n = m;
5660	    }
5661	return n;
5662      }
5663
5664    case COMPLEX_TYPE:
5665      return 2;
5666
5667    case VECTOR_TYPE:
5668      return TYPE_VECTOR_SUBPARTS (type);
5669
5670    case INTEGER_TYPE:
5671    case REAL_TYPE:
5672    case FIXED_POINT_TYPE:
5673    case ENUMERAL_TYPE:
5674    case BOOLEAN_TYPE:
5675    case POINTER_TYPE:
5676    case OFFSET_TYPE:
5677    case REFERENCE_TYPE:
5678    case NULLPTR_TYPE:
5679      return 1;
5680
5681    case ERROR_MARK:
5682      return 0;
5683
5684    case VOID_TYPE:
5685    case METHOD_TYPE:
5686    case FUNCTION_TYPE:
5687    case LANG_TYPE:
5688    default:
5689      gcc_unreachable ();
5690    }
5691}
5692
5693/* Helper for categorize_ctor_elements.  Identical interface.  */
5694
5695static bool
5696categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5697			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
5698{
5699  unsigned HOST_WIDE_INT idx;
5700  HOST_WIDE_INT nz_elts, init_elts, num_fields;
5701  tree value, purpose, elt_type;
5702
5703  /* Whether CTOR is a valid constant initializer, in accordance with what
5704     initializer_constant_valid_p does.  If inferred from the constructor
5705     elements, true until proven otherwise.  */
5706  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5707  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5708
5709  nz_elts = 0;
5710  init_elts = 0;
5711  num_fields = 0;
5712  elt_type = NULL_TREE;
5713
5714  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5715    {
5716      HOST_WIDE_INT mult = 1;
5717
5718      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5719	{
5720	  tree lo_index = TREE_OPERAND (purpose, 0);
5721	  tree hi_index = TREE_OPERAND (purpose, 1);
5722
5723	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5724	    mult = (tree_to_uhwi (hi_index)
5725		    - tree_to_uhwi (lo_index) + 1);
5726	}
5727      num_fields += mult;
5728      elt_type = TREE_TYPE (value);
5729
5730      switch (TREE_CODE (value))
5731	{
5732	case CONSTRUCTOR:
5733	  {
5734	    HOST_WIDE_INT nz = 0, ic = 0;
5735
5736	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5737							   p_complete);
5738
5739	    nz_elts += mult * nz;
5740 	    init_elts += mult * ic;
5741
5742	    if (const_from_elts_p && const_p)
5743	      const_p = const_elt_p;
5744	  }
5745	  break;
5746
5747	case INTEGER_CST:
5748	case REAL_CST:
5749	case FIXED_CST:
5750	  if (!initializer_zerop (value))
5751	    nz_elts += mult;
5752	  init_elts += mult;
5753	  break;
5754
5755	case STRING_CST:
5756	  nz_elts += mult * TREE_STRING_LENGTH (value);
5757	  init_elts += mult * TREE_STRING_LENGTH (value);
5758	  break;
5759
5760	case COMPLEX_CST:
5761	  if (!initializer_zerop (TREE_REALPART (value)))
5762	    nz_elts += mult;
5763	  if (!initializer_zerop (TREE_IMAGPART (value)))
5764	    nz_elts += mult;
5765	  init_elts += mult;
5766	  break;
5767
5768	case VECTOR_CST:
5769	  {
5770	    unsigned i;
5771	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5772	      {
5773		tree v = VECTOR_CST_ELT (value, i);
5774		if (!initializer_zerop (v))
5775		  nz_elts += mult;
5776		init_elts += mult;
5777	      }
5778	  }
5779	  break;
5780
5781	default:
5782	  {
5783	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5784	    nz_elts += mult * tc;
5785	    init_elts += mult * tc;
5786
5787	    if (const_from_elts_p && const_p)
5788	      const_p = initializer_constant_valid_p (value, elt_type)
5789			!= NULL_TREE;
5790	  }
5791	  break;
5792	}
5793    }
5794
5795  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5796						num_fields, elt_type))
5797    *p_complete = false;
5798
5799  *p_nz_elts += nz_elts;
5800  *p_init_elts += init_elts;
5801
5802  return const_p;
5803}
5804
5805/* Examine CTOR to discover:
5806   * how many scalar fields are set to nonzero values,
5807     and place it in *P_NZ_ELTS;
5808   * how many scalar fields in total are in CTOR,
5809     and place it in *P_ELT_COUNT.
5810   * whether the constructor is complete -- in the sense that every
5811     meaningful byte is explicitly given a value --
5812     and place it in *P_COMPLETE.
5813
5814   Return whether or not CTOR is a valid static constant initializer, the same
5815   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
5816
5817bool
5818categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5819			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
5820{
5821  *p_nz_elts = 0;
5822  *p_init_elts = 0;
5823  *p_complete = true;
5824
5825  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5826}
5827
5828/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5829   of which had type LAST_TYPE.  Each element was itself a complete
5830   initializer, in the sense that every meaningful byte was explicitly
5831   given a value.  Return true if the same is true for the constructor
5832   as a whole.  */
5833
5834bool
5835complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5836			  const_tree last_type)
5837{
5838  if (TREE_CODE (type) == UNION_TYPE
5839      || TREE_CODE (type) == QUAL_UNION_TYPE)
5840    {
5841      if (num_elts == 0)
5842	return false;
5843
5844      gcc_assert (num_elts == 1 && last_type);
5845
5846      /* ??? We could look at each element of the union, and find the
5847	 largest element.  Which would avoid comparing the size of the
5848	 initialized element against any tail padding in the union.
5849	 Doesn't seem worth the effort...  */
5850      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5851    }
5852
5853  return count_type_elements (type, true) == num_elts;
5854}
5855
5856/* Return 1 if EXP contains mostly (3/4)  zeros.  */
5857
5858static int
5859mostly_zeros_p (const_tree exp)
5860{
5861  if (TREE_CODE (exp) == CONSTRUCTOR)
5862    {
5863      HOST_WIDE_INT nz_elts, init_elts;
5864      bool complete_p;
5865
5866      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5867      return !complete_p || nz_elts < init_elts / 4;
5868    }
5869
5870  return initializer_zerop (exp);
5871}
5872
5873/* Return 1 if EXP contains all zeros.  */
5874
5875static int
5876all_zeros_p (const_tree exp)
5877{
5878  if (TREE_CODE (exp) == CONSTRUCTOR)
5879    {
5880      HOST_WIDE_INT nz_elts, init_elts;
5881      bool complete_p;
5882
5883      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5884      return nz_elts == 0;
5885    }
5886
5887  return initializer_zerop (exp);
5888}
5889
5890/* Helper function for store_constructor.
5891   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5892   CLEARED is as for store_constructor.
5893   ALIAS_SET is the alias set to use for any stores.
5894
5895   This provides a recursive shortcut back to store_constructor when it isn't
5896   necessary to go through store_field.  This is so that we can pass through
5897   the cleared field to let store_constructor know that we may not have to
5898   clear a substructure if the outer structure has already been cleared.  */
5899
5900static void
5901store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5902			 HOST_WIDE_INT bitpos, machine_mode mode,
5903			 tree exp, int cleared, alias_set_type alias_set)
5904{
5905  if (TREE_CODE (exp) == CONSTRUCTOR
5906      /* We can only call store_constructor recursively if the size and
5907	 bit position are on a byte boundary.  */
5908      && bitpos % BITS_PER_UNIT == 0
5909      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5910      /* If we have a nonzero bitpos for a register target, then we just
5911	 let store_field do the bitfield handling.  This is unlikely to
5912	 generate unnecessary clear instructions anyways.  */
5913      && (bitpos == 0 || MEM_P (target)))
5914    {
5915      if (MEM_P (target))
5916	target
5917	  = adjust_address (target,
5918			    GET_MODE (target) == BLKmode
5919			    || 0 != (bitpos
5920				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
5921			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5922
5923
5924      /* Update the alias set, if required.  */
5925      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5926	  && MEM_ALIAS_SET (target) != 0)
5927	{
5928	  target = copy_rtx (target);
5929	  set_mem_alias_set (target, alias_set);
5930	}
5931
5932      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5933    }
5934  else
5935    store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5936}
5937
5938
5939/* Returns the number of FIELD_DECLs in TYPE.  */
5940
5941static int
5942fields_length (const_tree type)
5943{
5944  tree t = TYPE_FIELDS (type);
5945  int count = 0;
5946
5947  for (; t; t = DECL_CHAIN (t))
5948    if (TREE_CODE (t) == FIELD_DECL)
5949      ++count;
5950
5951  return count;
5952}
5953
5954
5955/* Store the value of constructor EXP into the rtx TARGET.
5956   TARGET is either a REG or a MEM; we know it cannot conflict, since
5957   safe_from_p has been called.
5958   CLEARED is true if TARGET is known to have been zero'd.
5959   SIZE is the number of bytes of TARGET we are allowed to modify: this
5960   may not be the same as the size of EXP if we are assigning to a field
5961   which has been packed to exclude padding bits.  */
5962
5963static void
5964store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5965{
5966  tree type = TREE_TYPE (exp);
5967#ifdef WORD_REGISTER_OPERATIONS
5968  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5969#endif
5970
5971  switch (TREE_CODE (type))
5972    {
5973    case RECORD_TYPE:
5974    case UNION_TYPE:
5975    case QUAL_UNION_TYPE:
5976      {
5977	unsigned HOST_WIDE_INT idx;
5978	tree field, value;
5979
5980	/* If size is zero or the target is already cleared, do nothing.  */
5981	if (size == 0 || cleared)
5982	  cleared = 1;
5983	/* We either clear the aggregate or indicate the value is dead.  */
5984	else if ((TREE_CODE (type) == UNION_TYPE
5985		  || TREE_CODE (type) == QUAL_UNION_TYPE)
5986		 && ! CONSTRUCTOR_ELTS (exp))
5987	  /* If the constructor is empty, clear the union.  */
5988	  {
5989	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5990	    cleared = 1;
5991	  }
5992
5993	/* If we are building a static constructor into a register,
5994	   set the initial value as zero so we can fold the value into
5995	   a constant.  But if more than one register is involved,
5996	   this probably loses.  */
5997	else if (REG_P (target) && TREE_STATIC (exp)
5998		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5999	  {
6000	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6001	    cleared = 1;
6002	  }
6003
6004        /* If the constructor has fewer fields than the structure or
6005	   if we are initializing the structure to mostly zeros, clear
6006	   the whole structure first.  Don't do this if TARGET is a
6007	   register whose mode size isn't equal to SIZE since
6008	   clear_storage can't handle this case.  */
6009	else if (size > 0
6010		 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6011		      != fields_length (type))
6012		     || mostly_zeros_p (exp))
6013		 && (!REG_P (target)
6014		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6015			 == size)))
6016	  {
6017	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6018	    cleared = 1;
6019	  }
6020
6021	if (REG_P (target) && !cleared)
6022	  emit_clobber (target);
6023
6024	/* Store each element of the constructor into the
6025	   corresponding field of TARGET.  */
6026	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6027	  {
6028	    machine_mode mode;
6029	    HOST_WIDE_INT bitsize;
6030	    HOST_WIDE_INT bitpos = 0;
6031	    tree offset;
6032	    rtx to_rtx = target;
6033
6034	    /* Just ignore missing fields.  We cleared the whole
6035	       structure, above, if any fields are missing.  */
6036	    if (field == 0)
6037	      continue;
6038
6039	    if (cleared && initializer_zerop (value))
6040	      continue;
6041
6042	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
6043	      bitsize = tree_to_uhwi (DECL_SIZE (field));
6044	    else
6045	      bitsize = -1;
6046
6047	    mode = DECL_MODE (field);
6048	    if (DECL_BIT_FIELD (field))
6049	      mode = VOIDmode;
6050
6051	    offset = DECL_FIELD_OFFSET (field);
6052	    if (tree_fits_shwi_p (offset)
6053		&& tree_fits_shwi_p (bit_position (field)))
6054	      {
6055		bitpos = int_bit_position (field);
6056		offset = 0;
6057	      }
6058	    else
6059	      bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6060
6061	    if (offset)
6062	      {
6063	        machine_mode address_mode;
6064		rtx offset_rtx;
6065
6066		offset
6067		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6068						    make_tree (TREE_TYPE (exp),
6069							       target));
6070
6071		offset_rtx = expand_normal (offset);
6072		gcc_assert (MEM_P (to_rtx));
6073
6074		address_mode = get_address_mode (to_rtx);
6075		if (GET_MODE (offset_rtx) != address_mode)
6076		  offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6077
6078		to_rtx = offset_address (to_rtx, offset_rtx,
6079					 highest_pow2_factor (offset));
6080	      }
6081
6082#ifdef WORD_REGISTER_OPERATIONS
6083	    /* If this initializes a field that is smaller than a
6084	       word, at the start of a word, try to widen it to a full
6085	       word.  This special case allows us to output C++ member
6086	       function initializations in a form that the optimizers
6087	       can understand.  */
6088	    if (REG_P (target)
6089		&& bitsize < BITS_PER_WORD
6090		&& bitpos % BITS_PER_WORD == 0
6091		&& GET_MODE_CLASS (mode) == MODE_INT
6092		&& TREE_CODE (value) == INTEGER_CST
6093		&& exp_size >= 0
6094		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6095	      {
6096		tree type = TREE_TYPE (value);
6097
6098		if (TYPE_PRECISION (type) < BITS_PER_WORD)
6099		  {
6100		    type = lang_hooks.types.type_for_mode
6101		      (word_mode, TYPE_UNSIGNED (type));
6102		    value = fold_convert (type, value);
6103		  }
6104
6105		if (BYTES_BIG_ENDIAN)
6106		  value
6107		   = fold_build2 (LSHIFT_EXPR, type, value,
6108				   build_int_cst (type,
6109						  BITS_PER_WORD - bitsize));
6110		bitsize = BITS_PER_WORD;
6111		mode = word_mode;
6112	      }
6113#endif
6114
6115	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6116		&& DECL_NONADDRESSABLE_P (field))
6117	      {
6118		to_rtx = copy_rtx (to_rtx);
6119		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6120	      }
6121
6122	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
6123				     value, cleared,
6124				     get_alias_set (TREE_TYPE (field)));
6125	  }
6126	break;
6127      }
6128    case ARRAY_TYPE:
6129      {
6130	tree value, index;
6131	unsigned HOST_WIDE_INT i;
6132	int need_to_clear;
6133	tree domain;
6134	tree elttype = TREE_TYPE (type);
6135	int const_bounds_p;
6136	HOST_WIDE_INT minelt = 0;
6137	HOST_WIDE_INT maxelt = 0;
6138
6139	domain = TYPE_DOMAIN (type);
6140	const_bounds_p = (TYPE_MIN_VALUE (domain)
6141			  && TYPE_MAX_VALUE (domain)
6142			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6143			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6144
6145	/* If we have constant bounds for the range of the type, get them.  */
6146	if (const_bounds_p)
6147	  {
6148	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6149	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6150	  }
6151
6152	/* If the constructor has fewer elements than the array, clear
6153           the whole array first.  Similarly if this is static
6154           constructor of a non-BLKmode object.  */
6155	if (cleared)
6156	  need_to_clear = 0;
6157	else if (REG_P (target) && TREE_STATIC (exp))
6158	  need_to_clear = 1;
6159	else
6160	  {
6161	    unsigned HOST_WIDE_INT idx;
6162	    tree index, value;
6163	    HOST_WIDE_INT count = 0, zero_count = 0;
6164	    need_to_clear = ! const_bounds_p;
6165
6166	    /* This loop is a more accurate version of the loop in
6167	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
6168	       is also needed to check for missing elements.  */
6169	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6170	      {
6171		HOST_WIDE_INT this_node_count;
6172
6173		if (need_to_clear)
6174		  break;
6175
6176		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6177		  {
6178		    tree lo_index = TREE_OPERAND (index, 0);
6179		    tree hi_index = TREE_OPERAND (index, 1);
6180
6181		    if (! tree_fits_uhwi_p (lo_index)
6182			|| ! tree_fits_uhwi_p (hi_index))
6183		      {
6184			need_to_clear = 1;
6185			break;
6186		      }
6187
6188		    this_node_count = (tree_to_uhwi (hi_index)
6189				       - tree_to_uhwi (lo_index) + 1);
6190		  }
6191		else
6192		  this_node_count = 1;
6193
6194		count += this_node_count;
6195		if (mostly_zeros_p (value))
6196		  zero_count += this_node_count;
6197	      }
6198
6199	    /* Clear the entire array first if there are any missing
6200	       elements, or if the incidence of zero elements is >=
6201	       75%.  */
6202	    if (! need_to_clear
6203		&& (count < maxelt - minelt + 1
6204		    || 4 * zero_count >= 3 * count))
6205	      need_to_clear = 1;
6206	  }
6207
6208	if (need_to_clear && size > 0)
6209	  {
6210	    if (REG_P (target))
6211	      emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
6212	    else
6213	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6214	    cleared = 1;
6215	  }
6216
6217	if (!cleared && REG_P (target))
6218	  /* Inform later passes that the old value is dead.  */
6219	  emit_clobber (target);
6220
6221	/* Store each element of the constructor into the
6222	   corresponding element of TARGET, determined by counting the
6223	   elements.  */
6224	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6225	  {
6226	    machine_mode mode;
6227	    HOST_WIDE_INT bitsize;
6228	    HOST_WIDE_INT bitpos;
6229	    rtx xtarget = target;
6230
6231	    if (cleared && initializer_zerop (value))
6232	      continue;
6233
6234	    mode = TYPE_MODE (elttype);
6235	    if (mode == BLKmode)
6236	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6237			 ? tree_to_uhwi (TYPE_SIZE (elttype))
6238			 : -1);
6239	    else
6240	      bitsize = GET_MODE_BITSIZE (mode);
6241
6242	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6243	      {
6244		tree lo_index = TREE_OPERAND (index, 0);
6245		tree hi_index = TREE_OPERAND (index, 1);
6246		rtx index_r, pos_rtx;
6247		HOST_WIDE_INT lo, hi, count;
6248		tree position;
6249
6250		/* If the range is constant and "small", unroll the loop.  */
6251		if (const_bounds_p
6252		    && tree_fits_shwi_p (lo_index)
6253		    && tree_fits_shwi_p (hi_index)
6254		    && (lo = tree_to_shwi (lo_index),
6255			hi = tree_to_shwi (hi_index),
6256			count = hi - lo + 1,
6257			(!MEM_P (target)
6258			 || count <= 2
6259			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6260			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6261				 <= 40 * 8)))))
6262		  {
6263		    lo -= minelt;  hi -= minelt;
6264		    for (; lo <= hi; lo++)
6265		      {
6266			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6267
6268			if (MEM_P (target)
6269			    && !MEM_KEEP_ALIAS_SET_P (target)
6270			    && TREE_CODE (type) == ARRAY_TYPE
6271			    && TYPE_NONALIASED_COMPONENT (type))
6272			  {
6273			    target = copy_rtx (target);
6274			    MEM_KEEP_ALIAS_SET_P (target) = 1;
6275			  }
6276
6277			store_constructor_field
6278			  (target, bitsize, bitpos, mode, value, cleared,
6279			   get_alias_set (elttype));
6280		      }
6281		  }
6282		else
6283		  {
6284		    rtx_code_label *loop_start = gen_label_rtx ();
6285		    rtx_code_label *loop_end = gen_label_rtx ();
6286		    tree exit_cond;
6287
6288		    expand_normal (hi_index);
6289
6290		    index = build_decl (EXPR_LOCATION (exp),
6291					VAR_DECL, NULL_TREE, domain);
6292		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6293		    SET_DECL_RTL (index, index_r);
6294		    store_expr (lo_index, index_r, 0, false);
6295
6296		    /* Build the head of the loop.  */
6297		    do_pending_stack_adjust ();
6298		    emit_label (loop_start);
6299
6300		    /* Assign value to element index.  */
6301		    position =
6302		      fold_convert (ssizetype,
6303				    fold_build2 (MINUS_EXPR,
6304						 TREE_TYPE (index),
6305						 index,
6306						 TYPE_MIN_VALUE (domain)));
6307
6308		    position =
6309			size_binop (MULT_EXPR, position,
6310				    fold_convert (ssizetype,
6311						  TYPE_SIZE_UNIT (elttype)));
6312
6313		    pos_rtx = expand_normal (position);
6314		    xtarget = offset_address (target, pos_rtx,
6315					      highest_pow2_factor (position));
6316		    xtarget = adjust_address (xtarget, mode, 0);
6317		    if (TREE_CODE (value) == CONSTRUCTOR)
6318		      store_constructor (value, xtarget, cleared,
6319					 bitsize / BITS_PER_UNIT);
6320		    else
6321		      store_expr (value, xtarget, 0, false);
6322
6323		    /* Generate a conditional jump to exit the loop.  */
6324		    exit_cond = build2 (LT_EXPR, integer_type_node,
6325					index, hi_index);
6326		    jumpif (exit_cond, loop_end, -1);
6327
6328		    /* Update the loop counter, and jump to the head of
6329		       the loop.  */
6330		    expand_assignment (index,
6331				       build2 (PLUS_EXPR, TREE_TYPE (index),
6332					       index, integer_one_node),
6333				       false);
6334
6335		    emit_jump (loop_start);
6336
6337		    /* Build the end of the loop.  */
6338		    emit_label (loop_end);
6339		  }
6340	      }
6341	    else if ((index != 0 && ! tree_fits_shwi_p (index))
6342		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6343	      {
6344		tree position;
6345
6346		if (index == 0)
6347		  index = ssize_int (1);
6348
6349		if (minelt)
6350		  index = fold_convert (ssizetype,
6351					fold_build2 (MINUS_EXPR,
6352						     TREE_TYPE (index),
6353						     index,
6354						     TYPE_MIN_VALUE (domain)));
6355
6356		position =
6357		  size_binop (MULT_EXPR, index,
6358			      fold_convert (ssizetype,
6359					    TYPE_SIZE_UNIT (elttype)));
6360		xtarget = offset_address (target,
6361					  expand_normal (position),
6362					  highest_pow2_factor (position));
6363		xtarget = adjust_address (xtarget, mode, 0);
6364		store_expr (value, xtarget, 0, false);
6365	      }
6366	    else
6367	      {
6368		if (index != 0)
6369		  bitpos = ((tree_to_shwi (index) - minelt)
6370			    * tree_to_uhwi (TYPE_SIZE (elttype)));
6371		else
6372		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6373
6374		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6375		    && TREE_CODE (type) == ARRAY_TYPE
6376		    && TYPE_NONALIASED_COMPONENT (type))
6377		  {
6378		    target = copy_rtx (target);
6379		    MEM_KEEP_ALIAS_SET_P (target) = 1;
6380		  }
6381		store_constructor_field (target, bitsize, bitpos, mode, value,
6382					 cleared, get_alias_set (elttype));
6383	      }
6384	  }
6385	break;
6386      }
6387
6388    case VECTOR_TYPE:
6389      {
6390	unsigned HOST_WIDE_INT idx;
6391	constructor_elt *ce;
6392	int i;
6393	int need_to_clear;
6394	int icode = CODE_FOR_nothing;
6395	tree elttype = TREE_TYPE (type);
6396	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6397	machine_mode eltmode = TYPE_MODE (elttype);
6398	HOST_WIDE_INT bitsize;
6399	HOST_WIDE_INT bitpos;
6400	rtvec vector = NULL;
6401	unsigned n_elts;
6402	alias_set_type alias;
6403
6404	gcc_assert (eltmode != BLKmode);
6405
6406	n_elts = TYPE_VECTOR_SUBPARTS (type);
6407	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6408	  {
6409	    machine_mode mode = GET_MODE (target);
6410
6411	    icode = (int) optab_handler (vec_init_optab, mode);
6412	    /* Don't use vec_init<mode> if some elements have VECTOR_TYPE.  */
6413	    if (icode != CODE_FOR_nothing)
6414	      {
6415		tree value;
6416
6417		FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6418		  if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6419		    {
6420		      icode = CODE_FOR_nothing;
6421		      break;
6422		    }
6423	      }
6424	    if (icode != CODE_FOR_nothing)
6425	      {
6426		unsigned int i;
6427
6428		vector = rtvec_alloc (n_elts);
6429		for (i = 0; i < n_elts; i++)
6430		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6431	      }
6432	  }
6433
6434	/* If the constructor has fewer elements than the vector,
6435	   clear the whole array first.  Similarly if this is static
6436	   constructor of a non-BLKmode object.  */
6437	if (cleared)
6438	  need_to_clear = 0;
6439	else if (REG_P (target) && TREE_STATIC (exp))
6440	  need_to_clear = 1;
6441	else
6442	  {
6443	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6444	    tree value;
6445
6446	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6447	      {
6448		int n_elts_here = tree_to_uhwi
6449		  (int_const_binop (TRUNC_DIV_EXPR,
6450				    TYPE_SIZE (TREE_TYPE (value)),
6451				    TYPE_SIZE (elttype)));
6452
6453		count += n_elts_here;
6454		if (mostly_zeros_p (value))
6455		  zero_count += n_elts_here;
6456	      }
6457
6458	    /* Clear the entire vector first if there are any missing elements,
6459	       or if the incidence of zero elements is >= 75%.  */
6460	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6461	  }
6462
6463	if (need_to_clear && size > 0 && !vector)
6464	  {
6465	    if (REG_P (target))
6466	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6467	    else
6468	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6469	    cleared = 1;
6470	  }
6471
6472	/* Inform later passes that the old value is dead.  */
6473	if (!cleared && !vector && REG_P (target))
6474	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6475
6476        if (MEM_P (target))
6477	  alias = MEM_ALIAS_SET (target);
6478	else
6479	  alias = get_alias_set (elttype);
6480
6481        /* Store each element of the constructor into the corresponding
6482	   element of TARGET, determined by counting the elements.  */
6483	for (idx = 0, i = 0;
6484	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6485	     idx++, i += bitsize / elt_size)
6486	  {
6487	    HOST_WIDE_INT eltpos;
6488	    tree value = ce->value;
6489
6490	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6491	    if (cleared && initializer_zerop (value))
6492	      continue;
6493
6494	    if (ce->index)
6495	      eltpos = tree_to_uhwi (ce->index);
6496	    else
6497	      eltpos = i;
6498
6499	    if (vector)
6500	      {
6501		/* vec_init<mode> should not be used if there are VECTOR_TYPE
6502		   elements.  */
6503		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6504		RTVEC_ELT (vector, eltpos)
6505		  = expand_normal (value);
6506	      }
6507	    else
6508	      {
6509		machine_mode value_mode =
6510		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6511		  ? TYPE_MODE (TREE_TYPE (value))
6512		  : eltmode;
6513		bitpos = eltpos * elt_size;
6514		store_constructor_field (target, bitsize, bitpos, value_mode,
6515					 value, cleared, alias);
6516	      }
6517	  }
6518
6519	if (vector)
6520	  emit_insn (GEN_FCN (icode)
6521		     (target,
6522		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
6523	break;
6524      }
6525
6526    default:
6527      gcc_unreachable ();
6528    }
6529}
6530
6531/* Store the value of EXP (an expression tree)
6532   into a subfield of TARGET which has mode MODE and occupies
6533   BITSIZE bits, starting BITPOS bits from the start of TARGET.
6534   If MODE is VOIDmode, it means that we are storing into a bit-field.
6535
6536   BITREGION_START is bitpos of the first bitfield in this region.
6537   BITREGION_END is the bitpos of the ending bitfield in this region.
6538   These two fields are 0, if the C++ memory model does not apply,
6539   or we are not interested in keeping track of bitfield regions.
6540
6541   Always return const0_rtx unless we have something particular to
6542   return.
6543
6544   ALIAS_SET is the alias set for the destination.  This value will
6545   (in general) be different from that for TARGET, since TARGET is a
6546   reference to the containing structure.
6547
6548   If NONTEMPORAL is true, try generating a nontemporal store.  */
6549
6550static rtx
6551store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6552	     unsigned HOST_WIDE_INT bitregion_start,
6553	     unsigned HOST_WIDE_INT bitregion_end,
6554	     machine_mode mode, tree exp,
6555	     alias_set_type alias_set, bool nontemporal)
6556{
6557  if (TREE_CODE (exp) == ERROR_MARK)
6558    return const0_rtx;
6559
6560  /* If we have nothing to store, do nothing unless the expression has
6561     side-effects.  */
6562  if (bitsize == 0)
6563    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6564
6565  if (GET_CODE (target) == CONCAT)
6566    {
6567      /* We're storing into a struct containing a single __complex.  */
6568
6569      gcc_assert (!bitpos);
6570      return store_expr (exp, target, 0, nontemporal);
6571    }
6572
6573  /* If the structure is in a register or if the component
6574     is a bit field, we cannot use addressing to access it.
6575     Use bit-field techniques or SUBREG to store in it.  */
6576
6577  if (mode == VOIDmode
6578      || (mode != BLKmode && ! direct_store[(int) mode]
6579	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6580	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6581      || REG_P (target)
6582      || GET_CODE (target) == SUBREG
6583      /* If the field isn't aligned enough to store as an ordinary memref,
6584	 store it as a bit field.  */
6585      || (mode != BLKmode
6586	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6587		|| bitpos % GET_MODE_ALIGNMENT (mode))
6588	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6589	      || (bitpos % BITS_PER_UNIT != 0)))
6590      || (bitsize >= 0 && mode != BLKmode
6591	  && GET_MODE_BITSIZE (mode) > bitsize)
6592      /* If the RHS and field are a constant size and the size of the
6593	 RHS isn't the same size as the bitfield, we must use bitfield
6594	 operations.  */
6595      || (bitsize >= 0
6596	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6597	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6598      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6599         decl we must use bitfield operations.  */
6600      || (bitsize >= 0
6601	  && TREE_CODE (exp) == MEM_REF
6602	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6603	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6604	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6605	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6606    {
6607      rtx temp;
6608      gimple nop_def;
6609
6610      /* If EXP is a NOP_EXPR of precision less than its mode, then that
6611	 implies a mask operation.  If the precision is the same size as
6612	 the field we're storing into, that mask is redundant.  This is
6613	 particularly common with bit field assignments generated by the
6614	 C front end.  */
6615      nop_def = get_def_for_expr (exp, NOP_EXPR);
6616      if (nop_def)
6617	{
6618	  tree type = TREE_TYPE (exp);
6619	  if (INTEGRAL_TYPE_P (type)
6620	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6621	      && bitsize == TYPE_PRECISION (type))
6622	    {
6623	      tree op = gimple_assign_rhs1 (nop_def);
6624	      type = TREE_TYPE (op);
6625	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6626		exp = op;
6627	    }
6628	}
6629
6630      temp = expand_normal (exp);
6631
6632      /* If BITSIZE is narrower than the size of the type of EXP
6633	 we will be narrowing TEMP.  Normally, what's wanted are the
6634	 low-order bits.  However, if EXP's type is a record and this is
6635	 big-endian machine, we want the upper BITSIZE bits.  */
6636      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6637	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6638	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6639	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6640			     GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6641			     NULL_RTX, 1);
6642
6643      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
6644      if (mode != VOIDmode && mode != BLKmode
6645	  && mode != TYPE_MODE (TREE_TYPE (exp)))
6646	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6647
6648      /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6649	 are both BLKmode, both must be in memory and BITPOS must be aligned
6650	 on a byte boundary.  If so, we simply do a block copy.  Likewise for
6651	 a BLKmode-like TARGET.  */
6652      if (GET_CODE (temp) != PARALLEL
6653	  && GET_MODE (temp) == BLKmode
6654	  && (GET_MODE (target) == BLKmode
6655	      || (MEM_P (target)
6656		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6657		  && (bitpos % BITS_PER_UNIT) == 0
6658		  && (bitsize % BITS_PER_UNIT) == 0)))
6659	{
6660	  gcc_assert (MEM_P (target) && MEM_P (temp)
6661		      && (bitpos % BITS_PER_UNIT) == 0);
6662
6663	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6664	  emit_block_move (target, temp,
6665			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6666				    / BITS_PER_UNIT),
6667			   BLOCK_OP_NORMAL);
6668
6669	  return const0_rtx;
6670	}
6671
6672      /* Handle calls that return values in multiple non-contiguous locations.
6673	 The Irix 6 ABI has examples of this.  */
6674      if (GET_CODE (temp) == PARALLEL)
6675	{
6676	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6677	  rtx temp_target;
6678	  if (mode == BLKmode || mode == VOIDmode)
6679	    mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6680	  temp_target = gen_reg_rtx (mode);
6681	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6682	  temp = temp_target;
6683	}
6684      else if (mode == BLKmode)
6685	{
6686	  /* Handle calls that return BLKmode values in registers.  */
6687	  if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6688	    {
6689	      rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6690	      copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6691	      temp = temp_target;
6692	    }
6693	  else
6694	    {
6695	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6696	      rtx temp_target;
6697	      mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6698	      temp_target = gen_reg_rtx (mode);
6699	      temp_target
6700	        = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6701				     temp_target, mode, mode);
6702	      temp = temp_target;
6703	    }
6704	}
6705
6706      /* Store the value in the bitfield.  */
6707      store_bit_field (target, bitsize, bitpos,
6708		       bitregion_start, bitregion_end,
6709		       mode, temp);
6710
6711      return const0_rtx;
6712    }
6713  else
6714    {
6715      /* Now build a reference to just the desired component.  */
6716      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6717
6718      if (to_rtx == target)
6719	to_rtx = copy_rtx (to_rtx);
6720
6721      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6722	set_mem_alias_set (to_rtx, alias_set);
6723
6724      return store_expr (exp, to_rtx, 0, nontemporal);
6725    }
6726}
6727
6728/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6729   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6730   codes and find the ultimate containing object, which we return.
6731
6732   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6733   bit position, and *PUNSIGNEDP to the signedness of the field.
6734   If the position of the field is variable, we store a tree
6735   giving the variable offset (in units) in *POFFSET.
6736   This offset is in addition to the bit position.
6737   If the position is not variable, we store 0 in *POFFSET.
6738
6739   If any of the extraction expressions is volatile,
6740   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
6741
6742   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6743   Otherwise, it is a mode that can be used to access the field.
6744
6745   If the field describes a variable-sized object, *PMODE is set to
6746   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
6747   this case, but the address of the object can be found.
6748
6749   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6750   look through nodes that serve as markers of a greater alignment than
6751   the one that can be deduced from the expression.  These nodes make it
6752   possible for front-ends to prevent temporaries from being created by
6753   the middle-end on alignment considerations.  For that purpose, the
6754   normal operating mode at high-level is to always pass FALSE so that
6755   the ultimate containing object is really returned; moreover, the
6756   associated predicate handled_component_p will always return TRUE
6757   on these nodes, thus indicating that they are essentially handled
6758   by get_inner_reference.  TRUE should only be passed when the caller
6759   is scanning the expression in order to build another representation
6760   and specifically knows how to handle these nodes; as such, this is
6761   the normal operating mode in the RTL expanders.  */
6762
6763tree
6764get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6765		     HOST_WIDE_INT *pbitpos, tree *poffset,
6766		     machine_mode *pmode, int *punsignedp,
6767		     int *pvolatilep, bool keep_aligning)
6768{
6769  tree size_tree = 0;
6770  machine_mode mode = VOIDmode;
6771  bool blkmode_bitfield = false;
6772  tree offset = size_zero_node;
6773  offset_int bit_offset = 0;
6774
6775  /* First get the mode, signedness, and size.  We do this from just the
6776     outermost expression.  */
6777  *pbitsize = -1;
6778  if (TREE_CODE (exp) == COMPONENT_REF)
6779    {
6780      tree field = TREE_OPERAND (exp, 1);
6781      size_tree = DECL_SIZE (field);
6782      if (flag_strict_volatile_bitfields > 0
6783	  && TREE_THIS_VOLATILE (exp)
6784	  && DECL_BIT_FIELD_TYPE (field)
6785	  && DECL_MODE (field) != BLKmode)
6786	/* Volatile bitfields should be accessed in the mode of the
6787	     field's type, not the mode computed based on the bit
6788	     size.  */
6789	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6790      else if (!DECL_BIT_FIELD (field))
6791	mode = DECL_MODE (field);
6792      else if (DECL_MODE (field) == BLKmode)
6793	blkmode_bitfield = true;
6794
6795      *punsignedp = DECL_UNSIGNED (field);
6796    }
6797  else if (TREE_CODE (exp) == BIT_FIELD_REF)
6798    {
6799      size_tree = TREE_OPERAND (exp, 1);
6800      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6801		     || TYPE_UNSIGNED (TREE_TYPE (exp)));
6802
6803      /* For vector types, with the correct size of access, use the mode of
6804	 inner type.  */
6805      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6806	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6807	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6808        mode = TYPE_MODE (TREE_TYPE (exp));
6809    }
6810  else
6811    {
6812      mode = TYPE_MODE (TREE_TYPE (exp));
6813      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6814
6815      if (mode == BLKmode)
6816	size_tree = TYPE_SIZE (TREE_TYPE (exp));
6817      else
6818	*pbitsize = GET_MODE_BITSIZE (mode);
6819    }
6820
6821  if (size_tree != 0)
6822    {
6823      if (! tree_fits_uhwi_p (size_tree))
6824	mode = BLKmode, *pbitsize = -1;
6825      else
6826	*pbitsize = tree_to_uhwi (size_tree);
6827    }
6828
6829  /* Compute cumulative bit-offset for nested component-refs and array-refs,
6830     and find the ultimate containing object.  */
6831  while (1)
6832    {
6833      switch (TREE_CODE (exp))
6834	{
6835	case BIT_FIELD_REF:
6836	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6837	  break;
6838
6839	case COMPONENT_REF:
6840	  {
6841	    tree field = TREE_OPERAND (exp, 1);
6842	    tree this_offset = component_ref_field_offset (exp);
6843
6844	    /* If this field hasn't been filled in yet, don't go past it.
6845	       This should only happen when folding expressions made during
6846	       type construction.  */
6847	    if (this_offset == 0)
6848	      break;
6849
6850	    offset = size_binop (PLUS_EXPR, offset, this_offset);
6851	    bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6852
6853	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
6854	  }
6855	  break;
6856
6857	case ARRAY_REF:
6858	case ARRAY_RANGE_REF:
6859	  {
6860	    tree index = TREE_OPERAND (exp, 1);
6861	    tree low_bound = array_ref_low_bound (exp);
6862	    tree unit_size = array_ref_element_size (exp);
6863
6864	    /* We assume all arrays have sizes that are a multiple of a byte.
6865	       First subtract the lower bound, if any, in the type of the
6866	       index, then convert to sizetype and multiply by the size of
6867	       the array element.  */
6868	    if (! integer_zerop (low_bound))
6869	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6870				   index, low_bound);
6871
6872	    offset = size_binop (PLUS_EXPR, offset,
6873			         size_binop (MULT_EXPR,
6874					     fold_convert (sizetype, index),
6875					     unit_size));
6876	  }
6877	  break;
6878
6879	case REALPART_EXPR:
6880	  break;
6881
6882	case IMAGPART_EXPR:
6883	  bit_offset += *pbitsize;
6884	  break;
6885
6886	case VIEW_CONVERT_EXPR:
6887	  if (keep_aligning && STRICT_ALIGNMENT
6888	      && (TYPE_ALIGN (TREE_TYPE (exp))
6889	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6890	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6891		  < BIGGEST_ALIGNMENT)
6892	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6893		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6894	    goto done;
6895	  break;
6896
6897	case MEM_REF:
6898	  /* Hand back the decl for MEM[&decl, off].  */
6899	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6900	    {
6901	      tree off = TREE_OPERAND (exp, 1);
6902	      if (!integer_zerop (off))
6903		{
6904		  offset_int boff, coff = mem_ref_offset (exp);
6905		  boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6906		  bit_offset += boff;
6907		}
6908	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6909	    }
6910	  goto done;
6911
6912	default:
6913	  goto done;
6914	}
6915
6916      /* If any reference in the chain is volatile, the effect is volatile.  */
6917      if (TREE_THIS_VOLATILE (exp))
6918	*pvolatilep = 1;
6919
6920      exp = TREE_OPERAND (exp, 0);
6921    }
6922 done:
6923
6924  /* If OFFSET is constant, see if we can return the whole thing as a
6925     constant bit position.  Make sure to handle overflow during
6926     this conversion.  */
6927  if (TREE_CODE (offset) == INTEGER_CST)
6928    {
6929      offset_int tem = wi::sext (wi::to_offset (offset),
6930				 TYPE_PRECISION (sizetype));
6931      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6932      tem += bit_offset;
6933      if (wi::fits_shwi_p (tem))
6934	{
6935	  *pbitpos = tem.to_shwi ();
6936	  *poffset = offset = NULL_TREE;
6937	}
6938    }
6939
6940  /* Otherwise, split it up.  */
6941  if (offset)
6942    {
6943      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
6944      if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6945        {
6946	  offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6947	  offset_int tem = bit_offset.and_not (mask);
6948	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6949	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
6950	  bit_offset -= tem;
6951	  tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6952	  offset = size_binop (PLUS_EXPR, offset,
6953			       wide_int_to_tree (sizetype, tem));
6954	}
6955
6956      *pbitpos = bit_offset.to_shwi ();
6957      *poffset = offset;
6958    }
6959
6960  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
6961  if (mode == VOIDmode
6962      && blkmode_bitfield
6963      && (*pbitpos % BITS_PER_UNIT) == 0
6964      && (*pbitsize % BITS_PER_UNIT) == 0)
6965    *pmode = BLKmode;
6966  else
6967    *pmode = mode;
6968
6969  return exp;
6970}
6971
6972/* Return a tree of sizetype representing the size, in bytes, of the element
6973   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6974
6975tree
6976array_ref_element_size (tree exp)
6977{
6978  tree aligned_size = TREE_OPERAND (exp, 3);
6979  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6980  location_t loc = EXPR_LOCATION (exp);
6981
6982  /* If a size was specified in the ARRAY_REF, it's the size measured
6983     in alignment units of the element type.  So multiply by that value.  */
6984  if (aligned_size)
6985    {
6986      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6987	 sizetype from another type of the same width and signedness.  */
6988      if (TREE_TYPE (aligned_size) != sizetype)
6989	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6990      return size_binop_loc (loc, MULT_EXPR, aligned_size,
6991			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
6992    }
6993
6994  /* Otherwise, take the size from that of the element type.  Substitute
6995     any PLACEHOLDER_EXPR that we have.  */
6996  else
6997    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6998}
6999
7000/* Return a tree representing the lower bound of the array mentioned in
7001   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
7002
7003tree
7004array_ref_low_bound (tree exp)
7005{
7006  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7007
7008  /* If a lower bound is specified in EXP, use it.  */
7009  if (TREE_OPERAND (exp, 2))
7010    return TREE_OPERAND (exp, 2);
7011
7012  /* Otherwise, if there is a domain type and it has a lower bound, use it,
7013     substituting for a PLACEHOLDER_EXPR as needed.  */
7014  if (domain_type && TYPE_MIN_VALUE (domain_type))
7015    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
7016
7017  /* Otherwise, return a zero of the appropriate type.  */
7018  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
7019}
7020
7021/* Returns true if REF is an array reference to an array at the end of
7022   a structure.  If this is the case, the array may be allocated larger
7023   than its upper bound implies.  */
7024
7025bool
7026array_at_struct_end_p (tree ref)
7027{
7028  if (TREE_CODE (ref) != ARRAY_REF
7029      && TREE_CODE (ref) != ARRAY_RANGE_REF)
7030    return false;
7031
7032  while (handled_component_p (ref))
7033    {
7034      /* If the reference chain contains a component reference to a
7035         non-union type and there follows another field the reference
7036	 is not at the end of a structure.  */
7037      if (TREE_CODE (ref) == COMPONENT_REF
7038	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
7039	{
7040	  tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
7041	  while (nextf && TREE_CODE (nextf) != FIELD_DECL)
7042	    nextf = DECL_CHAIN (nextf);
7043	  if (nextf)
7044	    return false;
7045	}
7046
7047      ref = TREE_OPERAND (ref, 0);
7048    }
7049
7050  /* If the reference is based on a declared entity, the size of the array
7051     is constrained by its given domain.  */
7052  if (DECL_P (ref))
7053    return false;
7054
7055  return true;
7056}
7057
7058/* Return a tree representing the upper bound of the array mentioned in
7059   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
7060
7061tree
7062array_ref_up_bound (tree exp)
7063{
7064  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7065
7066  /* If there is a domain type and it has an upper bound, use it, substituting
7067     for a PLACEHOLDER_EXPR as needed.  */
7068  if (domain_type && TYPE_MAX_VALUE (domain_type))
7069    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7070
7071  /* Otherwise fail.  */
7072  return NULL_TREE;
7073}
7074
7075/* Return a tree representing the offset, in bytes, of the field referenced
7076   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
7077
7078tree
7079component_ref_field_offset (tree exp)
7080{
7081  tree aligned_offset = TREE_OPERAND (exp, 2);
7082  tree field = TREE_OPERAND (exp, 1);
7083  location_t loc = EXPR_LOCATION (exp);
7084
7085  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7086     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
7087     value.  */
7088  if (aligned_offset)
7089    {
7090      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7091	 sizetype from another type of the same width and signedness.  */
7092      if (TREE_TYPE (aligned_offset) != sizetype)
7093	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7094      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7095			     size_int (DECL_OFFSET_ALIGN (field)
7096				       / BITS_PER_UNIT));
7097    }
7098
7099  /* Otherwise, take the offset from that of the field.  Substitute
7100     any PLACEHOLDER_EXPR that we have.  */
7101  else
7102    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7103}
7104
7105/* Alignment in bits the TARGET of an assignment may be assumed to have.  */
7106
7107static unsigned HOST_WIDE_INT
7108target_align (const_tree target)
7109{
7110  /* We might have a chain of nested references with intermediate misaligning
7111     bitfields components, so need to recurse to find out.  */
7112
7113  unsigned HOST_WIDE_INT this_align, outer_align;
7114
7115  switch (TREE_CODE (target))
7116    {
7117    case BIT_FIELD_REF:
7118      return 1;
7119
7120    case COMPONENT_REF:
7121      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7122      outer_align = target_align (TREE_OPERAND (target, 0));
7123      return MIN (this_align, outer_align);
7124
7125    case ARRAY_REF:
7126    case ARRAY_RANGE_REF:
7127      this_align = TYPE_ALIGN (TREE_TYPE (target));
7128      outer_align = target_align (TREE_OPERAND (target, 0));
7129      return MIN (this_align, outer_align);
7130
7131    CASE_CONVERT:
7132    case NON_LVALUE_EXPR:
7133    case VIEW_CONVERT_EXPR:
7134      this_align = TYPE_ALIGN (TREE_TYPE (target));
7135      outer_align = target_align (TREE_OPERAND (target, 0));
7136      return MAX (this_align, outer_align);
7137
7138    default:
7139      return TYPE_ALIGN (TREE_TYPE (target));
7140    }
7141}
7142
7143
7144/* Given an rtx VALUE that may contain additions and multiplications, return
7145   an equivalent value that just refers to a register, memory, or constant.
7146   This is done by generating instructions to perform the arithmetic and
7147   returning a pseudo-register containing the value.
7148
7149   The returned value may be a REG, SUBREG, MEM or constant.  */
7150
7151rtx
7152force_operand (rtx value, rtx target)
7153{
7154  rtx op1, op2;
7155  /* Use subtarget as the target for operand 0 of a binary operation.  */
7156  rtx subtarget = get_subtarget (target);
7157  enum rtx_code code = GET_CODE (value);
7158
7159  /* Check for subreg applied to an expression produced by loop optimizer.  */
7160  if (code == SUBREG
7161      && !REG_P (SUBREG_REG (value))
7162      && !MEM_P (SUBREG_REG (value)))
7163    {
7164      value
7165	= simplify_gen_subreg (GET_MODE (value),
7166			       force_reg (GET_MODE (SUBREG_REG (value)),
7167					  force_operand (SUBREG_REG (value),
7168							 NULL_RTX)),
7169			       GET_MODE (SUBREG_REG (value)),
7170			       SUBREG_BYTE (value));
7171      code = GET_CODE (value);
7172    }
7173
7174  /* Check for a PIC address load.  */
7175  if ((code == PLUS || code == MINUS)
7176      && XEXP (value, 0) == pic_offset_table_rtx
7177      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7178	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
7179	  || GET_CODE (XEXP (value, 1)) == CONST))
7180    {
7181      if (!subtarget)
7182	subtarget = gen_reg_rtx (GET_MODE (value));
7183      emit_move_insn (subtarget, value);
7184      return subtarget;
7185    }
7186
7187  if (ARITHMETIC_P (value))
7188    {
7189      op2 = XEXP (value, 1);
7190      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7191	subtarget = 0;
7192      if (code == MINUS && CONST_INT_P (op2))
7193	{
7194	  code = PLUS;
7195	  op2 = negate_rtx (GET_MODE (value), op2);
7196	}
7197
7198      /* Check for an addition with OP2 a constant integer and our first
7199         operand a PLUS of a virtual register and something else.  In that
7200         case, we want to emit the sum of the virtual register and the
7201         constant first and then add the other value.  This allows virtual
7202         register instantiation to simply modify the constant rather than
7203         creating another one around this addition.  */
7204      if (code == PLUS && CONST_INT_P (op2)
7205	  && GET_CODE (XEXP (value, 0)) == PLUS
7206	  && REG_P (XEXP (XEXP (value, 0), 0))
7207	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7208	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7209	{
7210	  rtx temp = expand_simple_binop (GET_MODE (value), code,
7211					  XEXP (XEXP (value, 0), 0), op2,
7212					  subtarget, 0, OPTAB_LIB_WIDEN);
7213	  return expand_simple_binop (GET_MODE (value), code, temp,
7214				      force_operand (XEXP (XEXP (value,
7215								 0), 1), 0),
7216				      target, 0, OPTAB_LIB_WIDEN);
7217	}
7218
7219      op1 = force_operand (XEXP (value, 0), subtarget);
7220      op2 = force_operand (op2, NULL_RTX);
7221      switch (code)
7222	{
7223	case MULT:
7224	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
7225	case DIV:
7226	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
7227	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
7228					target, 1, OPTAB_LIB_WIDEN);
7229	  else
7230	    return expand_divmod (0,
7231				  FLOAT_MODE_P (GET_MODE (value))
7232				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
7233				  GET_MODE (value), op1, op2, target, 0);
7234	case MOD:
7235	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7236				target, 0);
7237	case UDIV:
7238	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7239				target, 1);
7240	case UMOD:
7241	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7242				target, 1);
7243	case ASHIFTRT:
7244	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7245				      target, 0, OPTAB_LIB_WIDEN);
7246	default:
7247	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7248				      target, 1, OPTAB_LIB_WIDEN);
7249	}
7250    }
7251  if (UNARY_P (value))
7252    {
7253      if (!target)
7254	target = gen_reg_rtx (GET_MODE (value));
7255      op1 = force_operand (XEXP (value, 0), NULL_RTX);
7256      switch (code)
7257	{
7258	case ZERO_EXTEND:
7259	case SIGN_EXTEND:
7260	case TRUNCATE:
7261	case FLOAT_EXTEND:
7262	case FLOAT_TRUNCATE:
7263	  convert_move (target, op1, code == ZERO_EXTEND);
7264	  return target;
7265
7266	case FIX:
7267	case UNSIGNED_FIX:
7268	  expand_fix (target, op1, code == UNSIGNED_FIX);
7269	  return target;
7270
7271	case FLOAT:
7272	case UNSIGNED_FLOAT:
7273	  expand_float (target, op1, code == UNSIGNED_FLOAT);
7274	  return target;
7275
7276	default:
7277	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7278	}
7279    }
7280
7281#ifdef INSN_SCHEDULING
7282  /* On machines that have insn scheduling, we want all memory reference to be
7283     explicit, so we need to deal with such paradoxical SUBREGs.  */
7284  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7285    value
7286      = simplify_gen_subreg (GET_MODE (value),
7287			     force_reg (GET_MODE (SUBREG_REG (value)),
7288					force_operand (SUBREG_REG (value),
7289						       NULL_RTX)),
7290			     GET_MODE (SUBREG_REG (value)),
7291			     SUBREG_BYTE (value));
7292#endif
7293
7294  return value;
7295}
7296
7297/* Subroutine of expand_expr: return nonzero iff there is no way that
7298   EXP can reference X, which is being modified.  TOP_P is nonzero if this
7299   call is going to be used to determine whether we need a temporary
7300   for EXP, as opposed to a recursive call to this function.
7301
7302   It is always safe for this routine to return zero since it merely
7303   searches for optimization opportunities.  */
7304
7305int
7306safe_from_p (const_rtx x, tree exp, int top_p)
7307{
7308  rtx exp_rtl = 0;
7309  int i, nops;
7310
7311  if (x == 0
7312      /* If EXP has varying size, we MUST use a target since we currently
7313	 have no way of allocating temporaries of variable size
7314	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7315	 So we assume here that something at a higher level has prevented a
7316	 clash.  This is somewhat bogus, but the best we can do.  Only
7317	 do this when X is BLKmode and when we are at the top level.  */
7318      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7319	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7320	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7321	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7322	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7323	      != INTEGER_CST)
7324	  && GET_MODE (x) == BLKmode)
7325      /* If X is in the outgoing argument area, it is always safe.  */
7326      || (MEM_P (x)
7327	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
7328	      || (GET_CODE (XEXP (x, 0)) == PLUS
7329		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7330    return 1;
7331
7332  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7333     find the underlying pseudo.  */
7334  if (GET_CODE (x) == SUBREG)
7335    {
7336      x = SUBREG_REG (x);
7337      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7338	return 0;
7339    }
7340
7341  /* Now look at our tree code and possibly recurse.  */
7342  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7343    {
7344    case tcc_declaration:
7345      exp_rtl = DECL_RTL_IF_SET (exp);
7346      break;
7347
7348    case tcc_constant:
7349      return 1;
7350
7351    case tcc_exceptional:
7352      if (TREE_CODE (exp) == TREE_LIST)
7353	{
7354	  while (1)
7355	    {
7356	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7357		return 0;
7358	      exp = TREE_CHAIN (exp);
7359	      if (!exp)
7360		return 1;
7361	      if (TREE_CODE (exp) != TREE_LIST)
7362		return safe_from_p (x, exp, 0);
7363	    }
7364	}
7365      else if (TREE_CODE (exp) == CONSTRUCTOR)
7366	{
7367	  constructor_elt *ce;
7368	  unsigned HOST_WIDE_INT idx;
7369
7370	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7371	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7372		|| !safe_from_p (x, ce->value, 0))
7373	      return 0;
7374	  return 1;
7375	}
7376      else if (TREE_CODE (exp) == ERROR_MARK)
7377	return 1;	/* An already-visited SAVE_EXPR? */
7378      else
7379	return 0;
7380
7381    case tcc_statement:
7382      /* The only case we look at here is the DECL_INITIAL inside a
7383	 DECL_EXPR.  */
7384      return (TREE_CODE (exp) != DECL_EXPR
7385	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7386	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7387	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7388
7389    case tcc_binary:
7390    case tcc_comparison:
7391      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7392	return 0;
7393      /* Fall through.  */
7394
7395    case tcc_unary:
7396      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7397
7398    case tcc_expression:
7399    case tcc_reference:
7400    case tcc_vl_exp:
7401      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
7402	 the expression.  If it is set, we conflict iff we are that rtx or
7403	 both are in memory.  Otherwise, we check all operands of the
7404	 expression recursively.  */
7405
7406      switch (TREE_CODE (exp))
7407	{
7408	case ADDR_EXPR:
7409	  /* If the operand is static or we are static, we can't conflict.
7410	     Likewise if we don't conflict with the operand at all.  */
7411	  if (staticp (TREE_OPERAND (exp, 0))
7412	      || TREE_STATIC (exp)
7413	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7414	    return 1;
7415
7416	  /* Otherwise, the only way this can conflict is if we are taking
7417	     the address of a DECL a that address if part of X, which is
7418	     very rare.  */
7419	  exp = TREE_OPERAND (exp, 0);
7420	  if (DECL_P (exp))
7421	    {
7422	      if (!DECL_RTL_SET_P (exp)
7423		  || !MEM_P (DECL_RTL (exp)))
7424		return 0;
7425	      else
7426		exp_rtl = XEXP (DECL_RTL (exp), 0);
7427	    }
7428	  break;
7429
7430	case MEM_REF:
7431	  if (MEM_P (x)
7432	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7433					get_alias_set (exp)))
7434	    return 0;
7435	  break;
7436
7437	case CALL_EXPR:
7438	  /* Assume that the call will clobber all hard registers and
7439	     all of memory.  */
7440	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7441	      || MEM_P (x))
7442	    return 0;
7443	  break;
7444
7445	case WITH_CLEANUP_EXPR:
7446	case CLEANUP_POINT_EXPR:
7447	  /* Lowered by gimplify.c.  */
7448	  gcc_unreachable ();
7449
7450	case SAVE_EXPR:
7451	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7452
7453	default:
7454	  break;
7455	}
7456
7457      /* If we have an rtx, we do not need to scan our operands.  */
7458      if (exp_rtl)
7459	break;
7460
7461      nops = TREE_OPERAND_LENGTH (exp);
7462      for (i = 0; i < nops; i++)
7463	if (TREE_OPERAND (exp, i) != 0
7464	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7465	  return 0;
7466
7467      break;
7468
7469    case tcc_type:
7470      /* Should never get a type here.  */
7471      gcc_unreachable ();
7472    }
7473
7474  /* If we have an rtl, find any enclosed object.  Then see if we conflict
7475     with it.  */
7476  if (exp_rtl)
7477    {
7478      if (GET_CODE (exp_rtl) == SUBREG)
7479	{
7480	  exp_rtl = SUBREG_REG (exp_rtl);
7481	  if (REG_P (exp_rtl)
7482	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7483	    return 0;
7484	}
7485
7486      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
7487	 are memory and they conflict.  */
7488      return ! (rtx_equal_p (x, exp_rtl)
7489		|| (MEM_P (x) && MEM_P (exp_rtl)
7490		    && true_dependence (exp_rtl, VOIDmode, x)));
7491    }
7492
7493  /* If we reach here, it is safe.  */
7494  return 1;
7495}
7496
7497
7498/* Return the highest power of two that EXP is known to be a multiple of.
7499   This is used in updating alignment of MEMs in array references.  */
7500
7501unsigned HOST_WIDE_INT
7502highest_pow2_factor (const_tree exp)
7503{
7504  unsigned HOST_WIDE_INT ret;
7505  int trailing_zeros = tree_ctz (exp);
7506  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7507    return BIGGEST_ALIGNMENT;
7508  ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7509  if (ret > BIGGEST_ALIGNMENT)
7510    return BIGGEST_ALIGNMENT;
7511  return ret;
7512}
7513
7514/* Similar, except that the alignment requirements of TARGET are
7515   taken into account.  Assume it is at least as aligned as its
7516   type, unless it is a COMPONENT_REF in which case the layout of
7517   the structure gives the alignment.  */
7518
7519static unsigned HOST_WIDE_INT
7520highest_pow2_factor_for_target (const_tree target, const_tree exp)
7521{
7522  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7523  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7524
7525  return MAX (factor, talign);
7526}
7527
7528#ifdef HAVE_conditional_move
7529/* Convert the tree comparison code TCODE to the rtl one where the
7530   signedness is UNSIGNEDP.  */
7531
7532static enum rtx_code
7533convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7534{
7535  enum rtx_code code;
7536  switch (tcode)
7537    {
7538    case EQ_EXPR:
7539      code = EQ;
7540      break;
7541    case NE_EXPR:
7542      code = NE;
7543      break;
7544    case LT_EXPR:
7545      code = unsignedp ? LTU : LT;
7546      break;
7547    case LE_EXPR:
7548      code = unsignedp ? LEU : LE;
7549      break;
7550    case GT_EXPR:
7551      code = unsignedp ? GTU : GT;
7552      break;
7553    case GE_EXPR:
7554      code = unsignedp ? GEU : GE;
7555      break;
7556    case UNORDERED_EXPR:
7557      code = UNORDERED;
7558      break;
7559    case ORDERED_EXPR:
7560      code = ORDERED;
7561      break;
7562    case UNLT_EXPR:
7563      code = UNLT;
7564      break;
7565    case UNLE_EXPR:
7566      code = UNLE;
7567      break;
7568    case UNGT_EXPR:
7569      code = UNGT;
7570      break;
7571    case UNGE_EXPR:
7572      code = UNGE;
7573      break;
7574    case UNEQ_EXPR:
7575      code = UNEQ;
7576      break;
7577    case LTGT_EXPR:
7578      code = LTGT;
7579      break;
7580
7581    default:
7582      gcc_unreachable ();
7583    }
7584  return code;
7585}
7586#endif
7587
7588/* Subroutine of expand_expr.  Expand the two operands of a binary
7589   expression EXP0 and EXP1 placing the results in OP0 and OP1.
7590   The value may be stored in TARGET if TARGET is nonzero.  The
7591   MODIFIER argument is as documented by expand_expr.  */
7592
7593void
7594expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7595		 enum expand_modifier modifier)
7596{
7597  if (! safe_from_p (target, exp1, 1))
7598    target = 0;
7599  if (operand_equal_p (exp0, exp1, 0))
7600    {
7601      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7602      *op1 = copy_rtx (*op0);
7603    }
7604  else
7605    {
7606      /* If we need to preserve evaluation order, copy exp0 into its own
7607	 temporary variable so that it can't be clobbered by exp1.  */
7608      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7609	exp0 = save_expr (exp0);
7610      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7611      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7612    }
7613}
7614
7615
7616/* Return a MEM that contains constant EXP.  DEFER is as for
7617   output_constant_def and MODIFIER is as for expand_expr.  */
7618
7619static rtx
7620expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7621{
7622  rtx mem;
7623
7624  mem = output_constant_def (exp, defer);
7625  if (modifier != EXPAND_INITIALIZER)
7626    mem = use_anchored_address (mem);
7627  return mem;
7628}
7629
7630/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
7631   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7632
7633static rtx
7634expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7635		         enum expand_modifier modifier, addr_space_t as)
7636{
7637  rtx result, subtarget;
7638  tree inner, offset;
7639  HOST_WIDE_INT bitsize, bitpos;
7640  int volatilep, unsignedp;
7641  machine_mode mode1;
7642
7643  /* If we are taking the address of a constant and are at the top level,
7644     we have to use output_constant_def since we can't call force_const_mem
7645     at top level.  */
7646  /* ??? This should be considered a front-end bug.  We should not be
7647     generating ADDR_EXPR of something that isn't an LVALUE.  The only
7648     exception here is STRING_CST.  */
7649  if (CONSTANT_CLASS_P (exp))
7650    {
7651      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7652      if (modifier < EXPAND_SUM)
7653	result = force_operand (result, target);
7654      return result;
7655    }
7656
7657  /* Everything must be something allowed by is_gimple_addressable.  */
7658  switch (TREE_CODE (exp))
7659    {
7660    case INDIRECT_REF:
7661      /* This case will happen via recursion for &a->b.  */
7662      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7663
7664    case MEM_REF:
7665      {
7666	tree tem = TREE_OPERAND (exp, 0);
7667	if (!integer_zerop (TREE_OPERAND (exp, 1)))
7668	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7669	return expand_expr (tem, target, tmode, modifier);
7670      }
7671
7672    case CONST_DECL:
7673      /* Expand the initializer like constants above.  */
7674      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7675					   0, modifier), 0);
7676      if (modifier < EXPAND_SUM)
7677	result = force_operand (result, target);
7678      return result;
7679
7680    case REALPART_EXPR:
7681      /* The real part of the complex number is always first, therefore
7682	 the address is the same as the address of the parent object.  */
7683      offset = 0;
7684      bitpos = 0;
7685      inner = TREE_OPERAND (exp, 0);
7686      break;
7687
7688    case IMAGPART_EXPR:
7689      /* The imaginary part of the complex number is always second.
7690	 The expression is therefore always offset by the size of the
7691	 scalar type.  */
7692      offset = 0;
7693      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7694      inner = TREE_OPERAND (exp, 0);
7695      break;
7696
7697    case COMPOUND_LITERAL_EXPR:
7698      /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7699	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7700	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7701	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7702	 the initializers aren't gimplified.  */
7703      if (COMPOUND_LITERAL_EXPR_DECL (exp)
7704	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7705	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7706					target, tmode, modifier, as);
7707      /* FALLTHRU */
7708    default:
7709      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
7710	 expand_expr, as that can have various side effects; LABEL_DECLs for
7711	 example, may not have their DECL_RTL set yet.  Expand the rtl of
7712	 CONSTRUCTORs too, which should yield a memory reference for the
7713	 constructor's contents.  Assume language specific tree nodes can
7714	 be expanded in some interesting way.  */
7715      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7716      if (DECL_P (exp)
7717	  || TREE_CODE (exp) == CONSTRUCTOR
7718	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7719	{
7720	  result = expand_expr (exp, target, tmode,
7721				modifier == EXPAND_INITIALIZER
7722				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7723
7724	  /* If the DECL isn't in memory, then the DECL wasn't properly
7725	     marked TREE_ADDRESSABLE, which will be either a front-end
7726	     or a tree optimizer bug.  */
7727
7728	  if (TREE_ADDRESSABLE (exp)
7729	      && ! MEM_P (result)
7730	      && ! targetm.calls.allocate_stack_slots_for_args ())
7731	    {
7732	      error ("local frame unavailable (naked function?)");
7733	      return result;
7734	    }
7735	  else
7736	    gcc_assert (MEM_P (result));
7737	  result = XEXP (result, 0);
7738
7739	  /* ??? Is this needed anymore?  */
7740	  if (DECL_P (exp))
7741	    TREE_USED (exp) = 1;
7742
7743	  if (modifier != EXPAND_INITIALIZER
7744	      && modifier != EXPAND_CONST_ADDRESS
7745	      && modifier != EXPAND_SUM)
7746	    result = force_operand (result, target);
7747	  return result;
7748	}
7749
7750      /* Pass FALSE as the last argument to get_inner_reference although
7751	 we are expanding to RTL.  The rationale is that we know how to
7752	 handle "aligning nodes" here: we can just bypass them because
7753	 they won't change the final object whose address will be returned
7754	 (they actually exist only for that purpose).  */
7755      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7756				   &mode1, &unsignedp, &volatilep, false);
7757      break;
7758    }
7759
7760  /* We must have made progress.  */
7761  gcc_assert (inner != exp);
7762
7763  subtarget = offset || bitpos ? NULL_RTX : target;
7764  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7765     inner alignment, force the inner to be sufficiently aligned.  */
7766  if (CONSTANT_CLASS_P (inner)
7767      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7768    {
7769      inner = copy_node (inner);
7770      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7771      TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7772      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7773    }
7774  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7775
7776  if (offset)
7777    {
7778      rtx tmp;
7779
7780      if (modifier != EXPAND_NORMAL)
7781	result = force_operand (result, NULL);
7782      tmp = expand_expr (offset, NULL_RTX, tmode,
7783			 modifier == EXPAND_INITIALIZER
7784			  ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7785
7786      /* expand_expr is allowed to return an object in a mode other
7787	 than TMODE.  If it did, we need to convert.  */
7788      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7789	tmp = convert_modes (tmode, GET_MODE (tmp),
7790			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7791      result = convert_memory_address_addr_space (tmode, result, as);
7792      tmp = convert_memory_address_addr_space (tmode, tmp, as);
7793
7794      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7795	result = simplify_gen_binary (PLUS, tmode, result, tmp);
7796      else
7797	{
7798	  subtarget = bitpos ? NULL_RTX : target;
7799	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7800					1, OPTAB_LIB_WIDEN);
7801	}
7802    }
7803
7804  if (bitpos)
7805    {
7806      /* Someone beforehand should have rejected taking the address
7807	 of such an object.  */
7808      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7809
7810      result = convert_memory_address_addr_space (tmode, result, as);
7811      result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7812      if (modifier < EXPAND_SUM)
7813	result = force_operand (result, target);
7814    }
7815
7816  return result;
7817}
7818
7819/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
7820   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7821
7822static rtx
7823expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7824		       enum expand_modifier modifier)
7825{
7826  addr_space_t as = ADDR_SPACE_GENERIC;
7827  machine_mode address_mode = Pmode;
7828  machine_mode pointer_mode = ptr_mode;
7829  machine_mode rmode;
7830  rtx result;
7831
7832  /* Target mode of VOIDmode says "whatever's natural".  */
7833  if (tmode == VOIDmode)
7834    tmode = TYPE_MODE (TREE_TYPE (exp));
7835
7836  if (POINTER_TYPE_P (TREE_TYPE (exp)))
7837    {
7838      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7839      address_mode = targetm.addr_space.address_mode (as);
7840      pointer_mode = targetm.addr_space.pointer_mode (as);
7841    }
7842
7843  /* We can get called with some Weird Things if the user does silliness
7844     like "(short) &a".  In that case, convert_memory_address won't do
7845     the right thing, so ignore the given target mode.  */
7846  if (tmode != address_mode && tmode != pointer_mode)
7847    tmode = address_mode;
7848
7849  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7850				    tmode, modifier, as);
7851
7852  /* Despite expand_expr claims concerning ignoring TMODE when not
7853     strictly convenient, stuff breaks if we don't honor it.  Note
7854     that combined with the above, we only do this for pointer modes.  */
7855  rmode = GET_MODE (result);
7856  if (rmode == VOIDmode)
7857    rmode = tmode;
7858  if (rmode != tmode)
7859    result = convert_memory_address_addr_space (tmode, result, as);
7860
7861  return result;
7862}
7863
7864/* Generate code for computing CONSTRUCTOR EXP.
7865   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
7866   is TRUE, instead of creating a temporary variable in memory
7867   NULL is returned and the caller needs to handle it differently.  */
7868
7869static rtx
7870expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7871		    bool avoid_temp_mem)
7872{
7873  tree type = TREE_TYPE (exp);
7874  machine_mode mode = TYPE_MODE (type);
7875
7876  /* Try to avoid creating a temporary at all.  This is possible
7877     if all of the initializer is zero.
7878     FIXME: try to handle all [0..255] initializers we can handle
7879     with memset.  */
7880  if (TREE_STATIC (exp)
7881      && !TREE_ADDRESSABLE (exp)
7882      && target != 0 && mode == BLKmode
7883      && all_zeros_p (exp))
7884    {
7885      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7886      return target;
7887    }
7888
7889  /* All elts simple constants => refer to a constant in memory.  But
7890     if this is a non-BLKmode mode, let it store a field at a time
7891     since that should make a CONST_INT, CONST_WIDE_INT or
7892     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
7893     use, it is best to store directly into the target unless the type
7894     is large enough that memcpy will be used.  If we are making an
7895     initializer and all operands are constant, put it in memory as
7896     well.
7897
7898     FIXME: Avoid trying to fill vector constructors piece-meal.
7899     Output them with output_constant_def below unless we're sure
7900     they're zeros.  This should go away when vector initializers
7901     are treated like VECTOR_CST instead of arrays.  */
7902  if ((TREE_STATIC (exp)
7903       && ((mode == BLKmode
7904	    && ! (target != 0 && safe_from_p (target, exp, 1)))
7905		  || TREE_ADDRESSABLE (exp)
7906		  || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7907		      && (! can_move_by_pieces
7908				     (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7909				      TYPE_ALIGN (type)))
7910		      && ! mostly_zeros_p (exp))))
7911      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7912	  && TREE_CONSTANT (exp)))
7913    {
7914      rtx constructor;
7915
7916      if (avoid_temp_mem)
7917	return NULL_RTX;
7918
7919      constructor = expand_expr_constant (exp, 1, modifier);
7920
7921      if (modifier != EXPAND_CONST_ADDRESS
7922	  && modifier != EXPAND_INITIALIZER
7923	  && modifier != EXPAND_SUM)
7924	constructor = validize_mem (constructor);
7925
7926      return constructor;
7927    }
7928
7929  /* Handle calls that pass values in multiple non-contiguous
7930     locations.  The Irix 6 ABI has examples of this.  */
7931  if (target == 0 || ! safe_from_p (target, exp, 1)
7932      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7933    {
7934      if (avoid_temp_mem)
7935	return NULL_RTX;
7936
7937      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7938    }
7939
7940  store_constructor (exp, target, 0, int_expr_size (exp));
7941  return target;
7942}
7943
7944
7945/* expand_expr: generate code for computing expression EXP.
7946   An rtx for the computed value is returned.  The value is never null.
7947   In the case of a void EXP, const0_rtx is returned.
7948
7949   The value may be stored in TARGET if TARGET is nonzero.
7950   TARGET is just a suggestion; callers must assume that
7951   the rtx returned may not be the same as TARGET.
7952
7953   If TARGET is CONST0_RTX, it means that the value will be ignored.
7954
7955   If TMODE is not VOIDmode, it suggests generating the
7956   result in mode TMODE.  But this is done only when convenient.
7957   Otherwise, TMODE is ignored and the value generated in its natural mode.
7958   TMODE is just a suggestion; callers must assume that
7959   the rtx returned may not have mode TMODE.
7960
7961   Note that TARGET may have neither TMODE nor MODE.  In that case, it
7962   probably will not be used.
7963
7964   If MODIFIER is EXPAND_SUM then when EXP is an addition
7965   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7966   or a nest of (PLUS ...) and (MINUS ...) where the terms are
7967   products as above, or REG or MEM, or constant.
7968   Ordinarily in such cases we would output mul or add instructions
7969   and then return a pseudo reg containing the sum.
7970
7971   EXPAND_INITIALIZER is much like EXPAND_SUM except that
7972   it also marks a label as absolutely required (it can't be dead).
7973   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7974   This is used for outputting expressions used in initializers.
7975
7976   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7977   with a constant address even if that address is not normally legitimate.
7978   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7979
7980   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7981   a call parameter.  Such targets require special care as we haven't yet
7982   marked TARGET so that it's safe from being trashed by libcalls.  We
7983   don't want to use TARGET for anything but the final result;
7984   Intermediate values must go elsewhere.   Additionally, calls to
7985   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7986
7987   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7988   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7989   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
7990   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7991   recursively.
7992
7993   If INNER_REFERENCE_P is true, we are expanding an inner reference.
7994   In this case, we don't adjust a returned MEM rtx that wouldn't be
7995   sufficiently aligned for its mode; instead, it's up to the caller
7996   to deal with it afterwards.  This is used to make sure that unaligned
7997   base objects for which out-of-bounds accesses are supported, for
7998   example record types with trailing arrays, aren't realigned behind
7999   the back of the caller.
8000   The normal operating mode is to pass FALSE for this parameter.  */
8001
8002rtx
8003expand_expr_real (tree exp, rtx target, machine_mode tmode,
8004		  enum expand_modifier modifier, rtx *alt_rtl,
8005		  bool inner_reference_p)
8006{
8007  rtx ret;
8008
8009  /* Handle ERROR_MARK before anybody tries to access its type.  */
8010  if (TREE_CODE (exp) == ERROR_MARK
8011      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8012    {
8013      ret = CONST0_RTX (tmode);
8014      return ret ? ret : const0_rtx;
8015    }
8016
8017  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8018			    inner_reference_p);
8019  return ret;
8020}
8021
8022/* Try to expand the conditional expression which is represented by
8023   TREEOP0 ? TREEOP1 : TREEOP2 using conditonal moves.  If succeseds
8024   return the rtl reg which repsents the result.  Otherwise return
8025   NULL_RTL.  */
8026
8027static rtx
8028expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8029			      tree treeop1 ATTRIBUTE_UNUSED,
8030			      tree treeop2 ATTRIBUTE_UNUSED)
8031{
8032#ifdef HAVE_conditional_move
8033  rtx insn;
8034  rtx op00, op01, op1, op2;
8035  enum rtx_code comparison_code;
8036  machine_mode comparison_mode;
8037  gimple srcstmt;
8038  rtx temp;
8039  tree type = TREE_TYPE (treeop1);
8040  int unsignedp = TYPE_UNSIGNED (type);
8041  machine_mode mode = TYPE_MODE (type);
8042  machine_mode orig_mode = mode;
8043
8044  /* If we cannot do a conditional move on the mode, try doing it
8045     with the promoted mode. */
8046  if (!can_conditionally_move_p (mode))
8047    {
8048      mode = promote_mode (type, mode, &unsignedp);
8049      if (!can_conditionally_move_p (mode))
8050	return NULL_RTX;
8051      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
8052    }
8053  else
8054    temp = assign_temp (type, 0, 1);
8055
8056  start_sequence ();
8057  expand_operands (treeop1, treeop2,
8058		   temp, &op1, &op2, EXPAND_NORMAL);
8059
8060  if (TREE_CODE (treeop0) == SSA_NAME
8061      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8062    {
8063      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8064      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8065      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8066      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8067      comparison_mode = TYPE_MODE (type);
8068      unsignedp = TYPE_UNSIGNED (type);
8069      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8070    }
8071  else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
8072    {
8073      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8074      enum tree_code cmpcode = TREE_CODE (treeop0);
8075      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8076      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8077      unsignedp = TYPE_UNSIGNED (type);
8078      comparison_mode = TYPE_MODE (type);
8079      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8080    }
8081  else
8082    {
8083      op00 = expand_normal (treeop0);
8084      op01 = const0_rtx;
8085      comparison_code = NE;
8086      comparison_mode = GET_MODE (op00);
8087      if (comparison_mode == VOIDmode)
8088	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8089    }
8090
8091  if (GET_MODE (op1) != mode)
8092    op1 = gen_lowpart (mode, op1);
8093
8094  if (GET_MODE (op2) != mode)
8095    op2 = gen_lowpart (mode, op2);
8096
8097  /* Try to emit the conditional move.  */
8098  insn = emit_conditional_move (temp, comparison_code,
8099				op00, op01, comparison_mode,
8100				op1, op2, mode,
8101				unsignedp);
8102
8103  /* If we could do the conditional move, emit the sequence,
8104     and return.  */
8105  if (insn)
8106    {
8107      rtx_insn *seq = get_insns ();
8108      end_sequence ();
8109      emit_insn (seq);
8110      return convert_modes (orig_mode, mode, temp, 0);
8111    }
8112
8113  /* Otherwise discard the sequence and fall back to code with
8114     branches.  */
8115  end_sequence ();
8116#endif
8117  return NULL_RTX;
8118}
8119
8120rtx
8121expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8122		    enum expand_modifier modifier)
8123{
8124  rtx op0, op1, op2, temp;
8125  tree type;
8126  int unsignedp;
8127  machine_mode mode;
8128  enum tree_code code = ops->code;
8129  optab this_optab;
8130  rtx subtarget, original_target;
8131  int ignore;
8132  bool reduce_bit_field;
8133  location_t loc = ops->location;
8134  tree treeop0, treeop1, treeop2;
8135#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
8136				 ? reduce_to_bit_field_precision ((expr), \
8137								  target, \
8138								  type)	  \
8139				 : (expr))
8140
8141  type = ops->type;
8142  mode = TYPE_MODE (type);
8143  unsignedp = TYPE_UNSIGNED (type);
8144
8145  treeop0 = ops->op0;
8146  treeop1 = ops->op1;
8147  treeop2 = ops->op2;
8148
8149  /* We should be called only on simple (binary or unary) expressions,
8150     exactly those that are valid in gimple expressions that aren't
8151     GIMPLE_SINGLE_RHS (or invalid).  */
8152  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8153	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8154	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8155
8156  ignore = (target == const0_rtx
8157	    || ((CONVERT_EXPR_CODE_P (code)
8158		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8159		&& TREE_CODE (type) == VOID_TYPE));
8160
8161  /* We should be called only if we need the result.  */
8162  gcc_assert (!ignore);
8163
8164  /* An operation in what may be a bit-field type needs the
8165     result to be reduced to the precision of the bit-field type,
8166     which is narrower than that of the type's mode.  */
8167  reduce_bit_field = (INTEGRAL_TYPE_P (type)
8168		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8169
8170  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8171    target = 0;
8172
8173  /* Use subtarget as the target for operand 0 of a binary operation.  */
8174  subtarget = get_subtarget (target);
8175  original_target = target;
8176
8177  switch (code)
8178    {
8179    case NON_LVALUE_EXPR:
8180    case PAREN_EXPR:
8181    CASE_CONVERT:
8182      if (treeop0 == error_mark_node)
8183	return const0_rtx;
8184
8185      if (TREE_CODE (type) == UNION_TYPE)
8186	{
8187	  tree valtype = TREE_TYPE (treeop0);
8188
8189	  /* If both input and output are BLKmode, this conversion isn't doing
8190	     anything except possibly changing memory attribute.  */
8191	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8192	    {
8193	      rtx result = expand_expr (treeop0, target, tmode,
8194					modifier);
8195
8196	      result = copy_rtx (result);
8197	      set_mem_attributes (result, type, 0);
8198	      return result;
8199	    }
8200
8201	  if (target == 0)
8202	    {
8203	      if (TYPE_MODE (type) != BLKmode)
8204		target = gen_reg_rtx (TYPE_MODE (type));
8205	      else
8206		target = assign_temp (type, 1, 1);
8207	    }
8208
8209	  if (MEM_P (target))
8210	    /* Store data into beginning of memory target.  */
8211	    store_expr (treeop0,
8212			adjust_address (target, TYPE_MODE (valtype), 0),
8213			modifier == EXPAND_STACK_PARM,
8214			false);
8215
8216	  else
8217	    {
8218	      gcc_assert (REG_P (target));
8219
8220	      /* Store this field into a union of the proper type.  */
8221	      store_field (target,
8222			   MIN ((int_size_in_bytes (TREE_TYPE
8223						    (treeop0))
8224				 * BITS_PER_UNIT),
8225				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8226			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8227	    }
8228
8229	  /* Return the entire union.  */
8230	  return target;
8231	}
8232
8233      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8234	{
8235	  op0 = expand_expr (treeop0, target, VOIDmode,
8236			     modifier);
8237
8238	  /* If the signedness of the conversion differs and OP0 is
8239	     a promoted SUBREG, clear that indication since we now
8240	     have to do the proper extension.  */
8241	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8242	      && GET_CODE (op0) == SUBREG)
8243	    SUBREG_PROMOTED_VAR_P (op0) = 0;
8244
8245	  return REDUCE_BIT_FIELD (op0);
8246	}
8247
8248      op0 = expand_expr (treeop0, NULL_RTX, mode,
8249			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8250      if (GET_MODE (op0) == mode)
8251	;
8252
8253      /* If OP0 is a constant, just convert it into the proper mode.  */
8254      else if (CONSTANT_P (op0))
8255	{
8256	  tree inner_type = TREE_TYPE (treeop0);
8257	  machine_mode inner_mode = GET_MODE (op0);
8258
8259	  if (inner_mode == VOIDmode)
8260	    inner_mode = TYPE_MODE (inner_type);
8261
8262	  if (modifier == EXPAND_INITIALIZER)
8263	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
8264				       subreg_lowpart_offset (mode,
8265							      inner_mode));
8266	  else
8267	    op0=  convert_modes (mode, inner_mode, op0,
8268				 TYPE_UNSIGNED (inner_type));
8269	}
8270
8271      else if (modifier == EXPAND_INITIALIZER)
8272	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8273
8274      else if (target == 0)
8275	op0 = convert_to_mode (mode, op0,
8276			       TYPE_UNSIGNED (TREE_TYPE
8277					      (treeop0)));
8278      else
8279	{
8280	  convert_move (target, op0,
8281			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8282	  op0 = target;
8283	}
8284
8285      return REDUCE_BIT_FIELD (op0);
8286
8287    case ADDR_SPACE_CONVERT_EXPR:
8288      {
8289	tree treeop0_type = TREE_TYPE (treeop0);
8290	addr_space_t as_to;
8291	addr_space_t as_from;
8292
8293	gcc_assert (POINTER_TYPE_P (type));
8294	gcc_assert (POINTER_TYPE_P (treeop0_type));
8295
8296	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8297	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8298
8299        /* Conversions between pointers to the same address space should
8300	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
8301	gcc_assert (as_to != as_from);
8302
8303        /* Ask target code to handle conversion between pointers
8304	   to overlapping address spaces.  */
8305	if (targetm.addr_space.subset_p (as_to, as_from)
8306	    || targetm.addr_space.subset_p (as_from, as_to))
8307	  {
8308	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8309	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8310	    gcc_assert (op0);
8311	    return op0;
8312	  }
8313
8314	/* For disjoint address spaces, converting anything but
8315	   a null pointer invokes undefined behaviour.  We simply
8316	   always return a null pointer here.  */
8317	return CONST0_RTX (mode);
8318      }
8319
8320    case POINTER_PLUS_EXPR:
8321      /* Even though the sizetype mode and the pointer's mode can be different
8322         expand is able to handle this correctly and get the correct result out
8323         of the PLUS_EXPR code.  */
8324      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8325         if sizetype precision is smaller than pointer precision.  */
8326      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8327	treeop1 = fold_convert_loc (loc, type,
8328				    fold_convert_loc (loc, ssizetype,
8329						      treeop1));
8330      /* If sizetype precision is larger than pointer precision, truncate the
8331	 offset to have matching modes.  */
8332      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8333	treeop1 = fold_convert_loc (loc, type, treeop1);
8334
8335    case PLUS_EXPR:
8336      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8337	 something else, make sure we add the register to the constant and
8338	 then to the other thing.  This case can occur during strength
8339	 reduction and doing it this way will produce better code if the
8340	 frame pointer or argument pointer is eliminated.
8341
8342	 fold-const.c will ensure that the constant is always in the inner
8343	 PLUS_EXPR, so the only case we need to do anything about is if
8344	 sp, ap, or fp is our second argument, in which case we must swap
8345	 the innermost first argument and our second argument.  */
8346
8347      if (TREE_CODE (treeop0) == PLUS_EXPR
8348	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8349	  && TREE_CODE (treeop1) == VAR_DECL
8350	  && (DECL_RTL (treeop1) == frame_pointer_rtx
8351	      || DECL_RTL (treeop1) == stack_pointer_rtx
8352	      || DECL_RTL (treeop1) == arg_pointer_rtx))
8353	{
8354	  gcc_unreachable ();
8355	}
8356
8357      /* If the result is to be ptr_mode and we are adding an integer to
8358	 something, we might be forming a constant.  So try to use
8359	 plus_constant.  If it produces a sum and we can't accept it,
8360	 use force_operand.  This allows P = &ARR[const] to generate
8361	 efficient code on machines where a SYMBOL_REF is not a valid
8362	 address.
8363
8364	 If this is an EXPAND_SUM call, always return the sum.  */
8365      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8366	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8367	{
8368	  if (modifier == EXPAND_STACK_PARM)
8369	    target = 0;
8370	  if (TREE_CODE (treeop0) == INTEGER_CST
8371	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8372	      && TREE_CONSTANT (treeop1))
8373	    {
8374	      rtx constant_part;
8375	      HOST_WIDE_INT wc;
8376	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8377
8378	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
8379				 EXPAND_SUM);
8380	      /* Use wi::shwi to ensure that the constant is
8381		 truncated according to the mode of OP1, then sign extended
8382		 to a HOST_WIDE_INT.  Using the constant directly can result
8383		 in non-canonical RTL in a 64x32 cross compile.  */
8384	      wc = TREE_INT_CST_LOW (treeop0);
8385	      constant_part =
8386		immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8387	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
8388	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8389		op1 = force_operand (op1, target);
8390	      return REDUCE_BIT_FIELD (op1);
8391	    }
8392
8393	  else if (TREE_CODE (treeop1) == INTEGER_CST
8394		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8395		   && TREE_CONSTANT (treeop0))
8396	    {
8397	      rtx constant_part;
8398	      HOST_WIDE_INT wc;
8399	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8400
8401	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
8402				 (modifier == EXPAND_INITIALIZER
8403				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8404	      if (! CONSTANT_P (op0))
8405		{
8406		  op1 = expand_expr (treeop1, NULL_RTX,
8407				     VOIDmode, modifier);
8408		  /* Return a PLUS if modifier says it's OK.  */
8409		  if (modifier == EXPAND_SUM
8410		      || modifier == EXPAND_INITIALIZER)
8411		    return simplify_gen_binary (PLUS, mode, op0, op1);
8412		  goto binop2;
8413		}
8414	      /* Use wi::shwi to ensure that the constant is
8415		 truncated according to the mode of OP1, then sign extended
8416		 to a HOST_WIDE_INT.  Using the constant directly can result
8417		 in non-canonical RTL in a 64x32 cross compile.  */
8418	      wc = TREE_INT_CST_LOW (treeop1);
8419	      constant_part
8420		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8421	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
8422	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8423		op0 = force_operand (op0, target);
8424	      return REDUCE_BIT_FIELD (op0);
8425	    }
8426	}
8427
8428      /* Use TER to expand pointer addition of a negated value
8429	 as pointer subtraction.  */
8430      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8431	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8432	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8433	  && TREE_CODE (treeop1) == SSA_NAME
8434	  && TYPE_MODE (TREE_TYPE (treeop0))
8435	     == TYPE_MODE (TREE_TYPE (treeop1)))
8436	{
8437	  gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8438	  if (def)
8439	    {
8440	      treeop1 = gimple_assign_rhs1 (def);
8441	      code = MINUS_EXPR;
8442	      goto do_minus;
8443	    }
8444	}
8445
8446      /* No sense saving up arithmetic to be done
8447	 if it's all in the wrong mode to form part of an address.
8448	 And force_operand won't know whether to sign-extend or
8449	 zero-extend.  */
8450      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8451	  || mode != ptr_mode)
8452	{
8453	  expand_operands (treeop0, treeop1,
8454			   subtarget, &op0, &op1, EXPAND_NORMAL);
8455	  if (op0 == const0_rtx)
8456	    return op1;
8457	  if (op1 == const0_rtx)
8458	    return op0;
8459	  goto binop2;
8460	}
8461
8462      expand_operands (treeop0, treeop1,
8463		       subtarget, &op0, &op1, modifier);
8464      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8465
8466    case MINUS_EXPR:
8467    do_minus:
8468      /* For initializers, we are allowed to return a MINUS of two
8469	 symbolic constants.  Here we handle all cases when both operands
8470	 are constant.  */
8471      /* Handle difference of two symbolic constants,
8472	 for the sake of an initializer.  */
8473      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8474	  && really_constant_p (treeop0)
8475	  && really_constant_p (treeop1))
8476	{
8477	  expand_operands (treeop0, treeop1,
8478			   NULL_RTX, &op0, &op1, modifier);
8479
8480	  /* If the last operand is a CONST_INT, use plus_constant of
8481	     the negated constant.  Else make the MINUS.  */
8482	  if (CONST_INT_P (op1))
8483	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8484						    -INTVAL (op1)));
8485	  else
8486	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8487	}
8488
8489      /* No sense saving up arithmetic to be done
8490	 if it's all in the wrong mode to form part of an address.
8491	 And force_operand won't know whether to sign-extend or
8492	 zero-extend.  */
8493      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8494	  || mode != ptr_mode)
8495	goto binop;
8496
8497      expand_operands (treeop0, treeop1,
8498		       subtarget, &op0, &op1, modifier);
8499
8500      /* Convert A - const to A + (-const).  */
8501      if (CONST_INT_P (op1))
8502	{
8503	  op1 = negate_rtx (mode, op1);
8504	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8505	}
8506
8507      goto binop2;
8508
8509    case WIDEN_MULT_PLUS_EXPR:
8510    case WIDEN_MULT_MINUS_EXPR:
8511      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8512      op2 = expand_normal (treeop2);
8513      target = expand_widen_pattern_expr (ops, op0, op1, op2,
8514					  target, unsignedp);
8515      return target;
8516
8517    case WIDEN_MULT_EXPR:
8518      /* If first operand is constant, swap them.
8519	 Thus the following special case checks need only
8520	 check the second operand.  */
8521      if (TREE_CODE (treeop0) == INTEGER_CST)
8522	{
8523	  tree t1 = treeop0;
8524	  treeop0 = treeop1;
8525	  treeop1 = t1;
8526	}
8527
8528      /* First, check if we have a multiplication of one signed and one
8529	 unsigned operand.  */
8530      if (TREE_CODE (treeop1) != INTEGER_CST
8531	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8532	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8533	{
8534	  machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8535	  this_optab = usmul_widen_optab;
8536	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8537		!= CODE_FOR_nothing)
8538	    {
8539	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8540		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8541				 EXPAND_NORMAL);
8542	      else
8543		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8544				 EXPAND_NORMAL);
8545	      /* op0 and op1 might still be constant, despite the above
8546		 != INTEGER_CST check.  Handle it.  */
8547	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8548		{
8549		  op0 = convert_modes (innermode, mode, op0, true);
8550		  op1 = convert_modes (innermode, mode, op1, false);
8551		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8552							target, unsignedp));
8553		}
8554	      goto binop3;
8555	    }
8556	}
8557      /* Check for a multiplication with matching signedness.  */
8558      else if ((TREE_CODE (treeop1) == INTEGER_CST
8559		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8560	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8561		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8562	{
8563	  tree op0type = TREE_TYPE (treeop0);
8564	  machine_mode innermode = TYPE_MODE (op0type);
8565	  bool zextend_p = TYPE_UNSIGNED (op0type);
8566	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8567	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8568
8569	  if (TREE_CODE (treeop0) != INTEGER_CST)
8570	    {
8571	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8572		    != CODE_FOR_nothing)
8573		{
8574		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8575				   EXPAND_NORMAL);
8576		  /* op0 and op1 might still be constant, despite the above
8577		     != INTEGER_CST check.  Handle it.  */
8578		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8579		    {
8580		     widen_mult_const:
8581		      op0 = convert_modes (innermode, mode, op0, zextend_p);
8582		      op1
8583			= convert_modes (innermode, mode, op1,
8584					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8585		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8586							    target,
8587							    unsignedp));
8588		    }
8589		  temp = expand_widening_mult (mode, op0, op1, target,
8590					       unsignedp, this_optab);
8591		  return REDUCE_BIT_FIELD (temp);
8592		}
8593	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8594		    != CODE_FOR_nothing
8595		  && innermode == word_mode)
8596		{
8597		  rtx htem, hipart;
8598		  op0 = expand_normal (treeop0);
8599		  if (TREE_CODE (treeop1) == INTEGER_CST)
8600		    op1 = convert_modes (innermode, mode,
8601					 expand_normal (treeop1),
8602					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8603		  else
8604		    op1 = expand_normal (treeop1);
8605		  /* op0 and op1 might still be constant, despite the above
8606		     != INTEGER_CST check.  Handle it.  */
8607		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8608		    goto widen_mult_const;
8609		  temp = expand_binop (mode, other_optab, op0, op1, target,
8610				       unsignedp, OPTAB_LIB_WIDEN);
8611		  hipart = gen_highpart (innermode, temp);
8612		  htem = expand_mult_highpart_adjust (innermode, hipart,
8613						      op0, op1, hipart,
8614						      zextend_p);
8615		  if (htem != hipart)
8616		    emit_move_insn (hipart, htem);
8617		  return REDUCE_BIT_FIELD (temp);
8618		}
8619	    }
8620	}
8621      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8622      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8623      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8624      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8625
8626    case FMA_EXPR:
8627      {
8628	optab opt = fma_optab;
8629	gimple def0, def2;
8630
8631	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8632	   call.  */
8633	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8634	  {
8635	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8636	    tree call_expr;
8637
8638	    gcc_assert (fn != NULL_TREE);
8639	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8640	    return expand_builtin (call_expr, target, subtarget, mode, false);
8641	  }
8642
8643	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8644	/* The multiplication is commutative - look at its 2nd operand
8645	   if the first isn't fed by a negate.  */
8646	if (!def0)
8647	  {
8648	    def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8649	    /* Swap operands if the 2nd operand is fed by a negate.  */
8650	    if (def0)
8651	      {
8652		tree tem = treeop0;
8653		treeop0 = treeop1;
8654		treeop1 = tem;
8655	      }
8656	  }
8657	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8658
8659	op0 = op2 = NULL;
8660
8661	if (def0 && def2
8662	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8663	  {
8664	    opt = fnms_optab;
8665	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8666	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8667	  }
8668	else if (def0
8669		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8670	  {
8671	    opt = fnma_optab;
8672	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8673	  }
8674	else if (def2
8675		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8676	  {
8677	    opt = fms_optab;
8678	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8679	  }
8680
8681	if (op0 == NULL)
8682	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8683	if (op2 == NULL)
8684	  op2 = expand_normal (treeop2);
8685	op1 = expand_normal (treeop1);
8686
8687	return expand_ternary_op (TYPE_MODE (type), opt,
8688				  op0, op1, op2, target, 0);
8689      }
8690
8691    case MULT_EXPR:
8692      /* If this is a fixed-point operation, then we cannot use the code
8693	 below because "expand_mult" doesn't support sat/no-sat fixed-point
8694         multiplications.   */
8695      if (ALL_FIXED_POINT_MODE_P (mode))
8696	goto binop;
8697
8698      /* If first operand is constant, swap them.
8699	 Thus the following special case checks need only
8700	 check the second operand.  */
8701      if (TREE_CODE (treeop0) == INTEGER_CST)
8702	{
8703	  tree t1 = treeop0;
8704	  treeop0 = treeop1;
8705	  treeop1 = t1;
8706	}
8707
8708      /* Attempt to return something suitable for generating an
8709	 indexed address, for machines that support that.  */
8710
8711      if (modifier == EXPAND_SUM && mode == ptr_mode
8712	  && tree_fits_shwi_p (treeop1))
8713	{
8714	  tree exp1 = treeop1;
8715
8716	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
8717			     EXPAND_SUM);
8718
8719	  if (!REG_P (op0))
8720	    op0 = force_operand (op0, NULL_RTX);
8721	  if (!REG_P (op0))
8722	    op0 = copy_to_mode_reg (mode, op0);
8723
8724	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8725			       gen_int_mode (tree_to_shwi (exp1),
8726					     TYPE_MODE (TREE_TYPE (exp1)))));
8727	}
8728
8729      if (modifier == EXPAND_STACK_PARM)
8730	target = 0;
8731
8732      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8733      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8734
8735    case TRUNC_DIV_EXPR:
8736    case FLOOR_DIV_EXPR:
8737    case CEIL_DIV_EXPR:
8738    case ROUND_DIV_EXPR:
8739    case EXACT_DIV_EXPR:
8740      /* If this is a fixed-point operation, then we cannot use the code
8741	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8742         divisions.   */
8743      if (ALL_FIXED_POINT_MODE_P (mode))
8744	goto binop;
8745
8746      if (modifier == EXPAND_STACK_PARM)
8747	target = 0;
8748      /* Possible optimization: compute the dividend with EXPAND_SUM
8749	 then if the divisor is constant can optimize the case
8750	 where some terms of the dividend have coeffs divisible by it.  */
8751      expand_operands (treeop0, treeop1,
8752		       subtarget, &op0, &op1, EXPAND_NORMAL);
8753      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8754
8755    case RDIV_EXPR:
8756      goto binop;
8757
8758    case MULT_HIGHPART_EXPR:
8759      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8760      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8761      gcc_assert (temp);
8762      return temp;
8763
8764    case TRUNC_MOD_EXPR:
8765    case FLOOR_MOD_EXPR:
8766    case CEIL_MOD_EXPR:
8767    case ROUND_MOD_EXPR:
8768      if (modifier == EXPAND_STACK_PARM)
8769	target = 0;
8770      expand_operands (treeop0, treeop1,
8771		       subtarget, &op0, &op1, EXPAND_NORMAL);
8772      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8773
8774    case FIXED_CONVERT_EXPR:
8775      op0 = expand_normal (treeop0);
8776      if (target == 0 || modifier == EXPAND_STACK_PARM)
8777	target = gen_reg_rtx (mode);
8778
8779      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8780	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8781          || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8782	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8783      else
8784	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8785      return target;
8786
8787    case FIX_TRUNC_EXPR:
8788      op0 = expand_normal (treeop0);
8789      if (target == 0 || modifier == EXPAND_STACK_PARM)
8790	target = gen_reg_rtx (mode);
8791      expand_fix (target, op0, unsignedp);
8792      return target;
8793
8794    case FLOAT_EXPR:
8795      op0 = expand_normal (treeop0);
8796      if (target == 0 || modifier == EXPAND_STACK_PARM)
8797	target = gen_reg_rtx (mode);
8798      /* expand_float can't figure out what to do if FROM has VOIDmode.
8799	 So give it the correct mode.  With -O, cse will optimize this.  */
8800      if (GET_MODE (op0) == VOIDmode)
8801	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8802				op0);
8803      expand_float (target, op0,
8804		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8805      return target;
8806
8807    case NEGATE_EXPR:
8808      op0 = expand_expr (treeop0, subtarget,
8809			 VOIDmode, EXPAND_NORMAL);
8810      if (modifier == EXPAND_STACK_PARM)
8811	target = 0;
8812      temp = expand_unop (mode,
8813      			  optab_for_tree_code (NEGATE_EXPR, type,
8814					       optab_default),
8815			  op0, target, 0);
8816      gcc_assert (temp);
8817      return REDUCE_BIT_FIELD (temp);
8818
8819    case ABS_EXPR:
8820      op0 = expand_expr (treeop0, subtarget,
8821			 VOIDmode, EXPAND_NORMAL);
8822      if (modifier == EXPAND_STACK_PARM)
8823	target = 0;
8824
8825      /* ABS_EXPR is not valid for complex arguments.  */
8826      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8827		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8828
8829      /* Unsigned abs is simply the operand.  Testing here means we don't
8830	 risk generating incorrect code below.  */
8831      if (TYPE_UNSIGNED (type))
8832	return op0;
8833
8834      return expand_abs (mode, op0, target, unsignedp,
8835			 safe_from_p (target, treeop0, 1));
8836
8837    case MAX_EXPR:
8838    case MIN_EXPR:
8839      target = original_target;
8840      if (target == 0
8841	  || modifier == EXPAND_STACK_PARM
8842	  || (MEM_P (target) && MEM_VOLATILE_P (target))
8843	  || GET_MODE (target) != mode
8844	  || (REG_P (target)
8845	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8846	target = gen_reg_rtx (mode);
8847      expand_operands (treeop0, treeop1,
8848		       target, &op0, &op1, EXPAND_NORMAL);
8849
8850      /* First try to do it with a special MIN or MAX instruction.
8851	 If that does not win, use a conditional jump to select the proper
8852	 value.  */
8853      this_optab = optab_for_tree_code (code, type, optab_default);
8854      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8855			   OPTAB_WIDEN);
8856      if (temp != 0)
8857	return temp;
8858
8859      /* At this point, a MEM target is no longer useful; we will get better
8860	 code without it.  */
8861
8862      if (! REG_P (target))
8863	target = gen_reg_rtx (mode);
8864
8865      /* If op1 was placed in target, swap op0 and op1.  */
8866      if (target != op0 && target == op1)
8867	{
8868	  temp = op0;
8869	  op0 = op1;
8870	  op1 = temp;
8871	}
8872
8873      /* We generate better code and avoid problems with op1 mentioning
8874	 target by forcing op1 into a pseudo if it isn't a constant.  */
8875      if (! CONSTANT_P (op1))
8876	op1 = force_reg (mode, op1);
8877
8878      {
8879	enum rtx_code comparison_code;
8880	rtx cmpop1 = op1;
8881
8882	if (code == MAX_EXPR)
8883	  comparison_code = unsignedp ? GEU : GE;
8884	else
8885	  comparison_code = unsignedp ? LEU : LE;
8886
8887	/* Canonicalize to comparisons against 0.  */
8888	if (op1 == const1_rtx)
8889	  {
8890	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8891	       or (a != 0 ? a : 1) for unsigned.
8892	       For MIN we are safe converting (a <= 1 ? a : 1)
8893	       into (a <= 0 ? a : 1)  */
8894	    cmpop1 = const0_rtx;
8895	    if (code == MAX_EXPR)
8896	      comparison_code = unsignedp ? NE : GT;
8897	  }
8898	if (op1 == constm1_rtx && !unsignedp)
8899	  {
8900	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8901	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8902	    cmpop1 = const0_rtx;
8903	    if (code == MIN_EXPR)
8904	      comparison_code = LT;
8905	  }
8906#ifdef HAVE_conditional_move
8907	/* Use a conditional move if possible.  */
8908	if (can_conditionally_move_p (mode))
8909	  {
8910	    rtx insn;
8911
8912	    start_sequence ();
8913
8914	    /* Try to emit the conditional move.  */
8915	    insn = emit_conditional_move (target, comparison_code,
8916					  op0, cmpop1, mode,
8917					  op0, op1, mode,
8918					  unsignedp);
8919
8920	    /* If we could do the conditional move, emit the sequence,
8921	       and return.  */
8922	    if (insn)
8923	      {
8924		rtx_insn *seq = get_insns ();
8925		end_sequence ();
8926		emit_insn (seq);
8927		return target;
8928	      }
8929
8930	    /* Otherwise discard the sequence and fall back to code with
8931	       branches.  */
8932	    end_sequence ();
8933	  }
8934#endif
8935	if (target != op0)
8936	  emit_move_insn (target, op0);
8937
8938	temp = gen_label_rtx ();
8939	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8940				 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8941				 -1);
8942      }
8943      emit_move_insn (target, op1);
8944      emit_label (temp);
8945      return target;
8946
8947    case BIT_NOT_EXPR:
8948      op0 = expand_expr (treeop0, subtarget,
8949			 VOIDmode, EXPAND_NORMAL);
8950      if (modifier == EXPAND_STACK_PARM)
8951	target = 0;
8952      /* In case we have to reduce the result to bitfield precision
8953	 for unsigned bitfield expand this as XOR with a proper constant
8954	 instead.  */
8955      if (reduce_bit_field && TYPE_UNSIGNED (type))
8956	{
8957	  wide_int mask = wi::mask (TYPE_PRECISION (type),
8958				    false, GET_MODE_PRECISION (mode));
8959
8960	  temp = expand_binop (mode, xor_optab, op0,
8961			       immed_wide_int_const (mask, mode),
8962			       target, 1, OPTAB_LIB_WIDEN);
8963	}
8964      else
8965	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8966      gcc_assert (temp);
8967      return temp;
8968
8969      /* ??? Can optimize bitwise operations with one arg constant.
8970	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8971	 and (a bitwise1 b) bitwise2 b (etc)
8972	 but that is probably not worth while.  */
8973
8974    case BIT_AND_EXPR:
8975    case BIT_IOR_EXPR:
8976    case BIT_XOR_EXPR:
8977      goto binop;
8978
8979    case LROTATE_EXPR:
8980    case RROTATE_EXPR:
8981      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8982		  || (GET_MODE_PRECISION (TYPE_MODE (type))
8983		      == TYPE_PRECISION (type)));
8984      /* fall through */
8985
8986    case LSHIFT_EXPR:
8987    case RSHIFT_EXPR:
8988      /* If this is a fixed-point operation, then we cannot use the code
8989	 below because "expand_shift" doesn't support sat/no-sat fixed-point
8990         shifts.   */
8991      if (ALL_FIXED_POINT_MODE_P (mode))
8992	goto binop;
8993
8994      if (! safe_from_p (subtarget, treeop1, 1))
8995	subtarget = 0;
8996      if (modifier == EXPAND_STACK_PARM)
8997	target = 0;
8998      op0 = expand_expr (treeop0, subtarget,
8999			 VOIDmode, EXPAND_NORMAL);
9000      temp = expand_variable_shift (code, mode, op0, treeop1, target,
9001				    unsignedp);
9002      if (code == LSHIFT_EXPR)
9003	temp = REDUCE_BIT_FIELD (temp);
9004      return temp;
9005
9006      /* Could determine the answer when only additive constants differ.  Also,
9007	 the addition of one can be handled by changing the condition.  */
9008    case LT_EXPR:
9009    case LE_EXPR:
9010    case GT_EXPR:
9011    case GE_EXPR:
9012    case EQ_EXPR:
9013    case NE_EXPR:
9014    case UNORDERED_EXPR:
9015    case ORDERED_EXPR:
9016    case UNLT_EXPR:
9017    case UNLE_EXPR:
9018    case UNGT_EXPR:
9019    case UNGE_EXPR:
9020    case UNEQ_EXPR:
9021    case LTGT_EXPR:
9022      temp = do_store_flag (ops,
9023			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9024			    tmode != VOIDmode ? tmode : mode);
9025      if (temp)
9026	return temp;
9027
9028      /* Use a compare and a jump for BLKmode comparisons, or for function
9029	 type comparisons is HAVE_canonicalize_funcptr_for_compare.  */
9030
9031      if ((target == 0
9032	   || modifier == EXPAND_STACK_PARM
9033	   || ! safe_from_p (target, treeop0, 1)
9034	   || ! safe_from_p (target, treeop1, 1)
9035	   /* Make sure we don't have a hard reg (such as function's return
9036	      value) live across basic blocks, if not optimizing.  */
9037	   || (!optimize && REG_P (target)
9038	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9039	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9040
9041      emit_move_insn (target, const0_rtx);
9042
9043      op1 = gen_label_rtx ();
9044      jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9045
9046      if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9047	emit_move_insn (target, constm1_rtx);
9048      else
9049	emit_move_insn (target, const1_rtx);
9050
9051      emit_label (op1);
9052      return target;
9053
9054    case COMPLEX_EXPR:
9055      /* Get the rtx code of the operands.  */
9056      op0 = expand_normal (treeop0);
9057      op1 = expand_normal (treeop1);
9058
9059      if (!target)
9060	target = gen_reg_rtx (TYPE_MODE (type));
9061      else
9062	/* If target overlaps with op1, then either we need to force
9063	   op1 into a pseudo (if target also overlaps with op0),
9064	   or write the complex parts in reverse order.  */
9065	switch (GET_CODE (target))
9066	  {
9067	  case CONCAT:
9068	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9069	      {
9070		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9071		  {
9072		  complex_expr_force_op1:
9073		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9074		    emit_move_insn (temp, op1);
9075		    op1 = temp;
9076		    break;
9077		  }
9078	      complex_expr_swap_order:
9079		/* Move the imaginary (op1) and real (op0) parts to their
9080		   location.  */
9081		write_complex_part (target, op1, true);
9082		write_complex_part (target, op0, false);
9083
9084		return target;
9085	      }
9086	    break;
9087	  case MEM:
9088	    temp = adjust_address_nv (target,
9089				      GET_MODE_INNER (GET_MODE (target)), 0);
9090	    if (reg_overlap_mentioned_p (temp, op1))
9091	      {
9092		machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9093		temp = adjust_address_nv (target, imode,
9094					  GET_MODE_SIZE (imode));
9095		if (reg_overlap_mentioned_p (temp, op0))
9096		  goto complex_expr_force_op1;
9097		goto complex_expr_swap_order;
9098	      }
9099	    break;
9100	  default:
9101	    if (reg_overlap_mentioned_p (target, op1))
9102	      {
9103		if (reg_overlap_mentioned_p (target, op0))
9104		  goto complex_expr_force_op1;
9105		goto complex_expr_swap_order;
9106	      }
9107	    break;
9108	  }
9109
9110      /* Move the real (op0) and imaginary (op1) parts to their location.  */
9111      write_complex_part (target, op0, false);
9112      write_complex_part (target, op1, true);
9113
9114      return target;
9115
9116    case WIDEN_SUM_EXPR:
9117      {
9118        tree oprnd0 = treeop0;
9119        tree oprnd1 = treeop1;
9120
9121        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9122        target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9123                                            target, unsignedp);
9124        return target;
9125      }
9126
9127    case REDUC_MAX_EXPR:
9128    case REDUC_MIN_EXPR:
9129    case REDUC_PLUS_EXPR:
9130      {
9131        op0 = expand_normal (treeop0);
9132        this_optab = optab_for_tree_code (code, type, optab_default);
9133        machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9134
9135	if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9136	  {
9137	    struct expand_operand ops[2];
9138	    enum insn_code icode = optab_handler (this_optab, vec_mode);
9139
9140	    create_output_operand (&ops[0], target, mode);
9141	    create_input_operand (&ops[1], op0, vec_mode);
9142	    if (maybe_expand_insn (icode, 2, ops))
9143	      {
9144		target = ops[0].value;
9145		if (GET_MODE (target) != mode)
9146		  return gen_lowpart (tmode, target);
9147		return target;
9148	      }
9149	  }
9150	/* Fall back to optab with vector result, and then extract scalar.  */
9151	this_optab = scalar_reduc_to_vector (this_optab, type);
9152        temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9153        gcc_assert (temp);
9154        /* The tree code produces a scalar result, but (somewhat by convention)
9155           the optab produces a vector with the result in element 0 if
9156           little-endian, or element N-1 if big-endian.  So pull the scalar
9157           result out of that element.  */
9158        int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9159        int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9160        temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9161				  target, mode, mode);
9162        gcc_assert (temp);
9163        return temp;
9164      }
9165
9166    case VEC_UNPACK_HI_EXPR:
9167    case VEC_UNPACK_LO_EXPR:
9168      {
9169	op0 = expand_normal (treeop0);
9170	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9171					  target, unsignedp);
9172	gcc_assert (temp);
9173	return temp;
9174      }
9175
9176    case VEC_UNPACK_FLOAT_HI_EXPR:
9177    case VEC_UNPACK_FLOAT_LO_EXPR:
9178      {
9179	op0 = expand_normal (treeop0);
9180	/* The signedness is determined from input operand.  */
9181	temp = expand_widen_pattern_expr
9182	  (ops, op0, NULL_RTX, NULL_RTX,
9183	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9184
9185	gcc_assert (temp);
9186	return temp;
9187      }
9188
9189    case VEC_WIDEN_MULT_HI_EXPR:
9190    case VEC_WIDEN_MULT_LO_EXPR:
9191    case VEC_WIDEN_MULT_EVEN_EXPR:
9192    case VEC_WIDEN_MULT_ODD_EXPR:
9193    case VEC_WIDEN_LSHIFT_HI_EXPR:
9194    case VEC_WIDEN_LSHIFT_LO_EXPR:
9195      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9196      target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9197					  target, unsignedp);
9198      gcc_assert (target);
9199      return target;
9200
9201    case VEC_PACK_TRUNC_EXPR:
9202    case VEC_PACK_SAT_EXPR:
9203    case VEC_PACK_FIX_TRUNC_EXPR:
9204      mode = TYPE_MODE (TREE_TYPE (treeop0));
9205      goto binop;
9206
9207    case VEC_PERM_EXPR:
9208      expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9209      op2 = expand_normal (treeop2);
9210
9211      /* Careful here: if the target doesn't support integral vector modes,
9212	 a constant selection vector could wind up smooshed into a normal
9213	 integral constant.  */
9214      if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9215	{
9216	  tree sel_type = TREE_TYPE (treeop2);
9217	  machine_mode vmode
9218	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9219			       TYPE_VECTOR_SUBPARTS (sel_type));
9220	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9221	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9222	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9223	}
9224      else
9225        gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9226
9227      temp = expand_vec_perm (mode, op0, op1, op2, target);
9228      gcc_assert (temp);
9229      return temp;
9230
9231    case DOT_PROD_EXPR:
9232      {
9233	tree oprnd0 = treeop0;
9234	tree oprnd1 = treeop1;
9235	tree oprnd2 = treeop2;
9236	rtx op2;
9237
9238	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9239	op2 = expand_normal (oprnd2);
9240	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9241					    target, unsignedp);
9242	return target;
9243      }
9244
9245      case SAD_EXPR:
9246      {
9247	tree oprnd0 = treeop0;
9248	tree oprnd1 = treeop1;
9249	tree oprnd2 = treeop2;
9250	rtx op2;
9251
9252	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9253	op2 = expand_normal (oprnd2);
9254	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9255					    target, unsignedp);
9256	return target;
9257      }
9258
9259    case REALIGN_LOAD_EXPR:
9260      {
9261        tree oprnd0 = treeop0;
9262        tree oprnd1 = treeop1;
9263        tree oprnd2 = treeop2;
9264        rtx op2;
9265
9266        this_optab = optab_for_tree_code (code, type, optab_default);
9267        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9268        op2 = expand_normal (oprnd2);
9269        temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9270				  target, unsignedp);
9271        gcc_assert (temp);
9272        return temp;
9273      }
9274
9275    case COND_EXPR:
9276      /* A COND_EXPR with its type being VOID_TYPE represents a
9277	 conditional jump and is handled in
9278	 expand_gimple_cond_expr.  */
9279      gcc_assert (!VOID_TYPE_P (type));
9280
9281      /* Note that COND_EXPRs whose type is a structure or union
9282	 are required to be constructed to contain assignments of
9283	 a temporary variable, so that we can evaluate them here
9284	 for side effect only.  If type is void, we must do likewise.  */
9285
9286      gcc_assert (!TREE_ADDRESSABLE (type)
9287		  && !ignore
9288		  && TREE_TYPE (treeop1) != void_type_node
9289		  && TREE_TYPE (treeop2) != void_type_node);
9290
9291      temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9292      if (temp)
9293	return temp;
9294
9295      /* If we are not to produce a result, we have no target.  Otherwise,
9296	 if a target was specified use it; it will not be used as an
9297	 intermediate target unless it is safe.  If no target, use a
9298	 temporary.  */
9299
9300      if (modifier != EXPAND_STACK_PARM
9301	  && original_target
9302	  && safe_from_p (original_target, treeop0, 1)
9303	  && GET_MODE (original_target) == mode
9304	  && !MEM_P (original_target))
9305	temp = original_target;
9306      else
9307	temp = assign_temp (type, 0, 1);
9308
9309      do_pending_stack_adjust ();
9310      NO_DEFER_POP;
9311      op0 = gen_label_rtx ();
9312      op1 = gen_label_rtx ();
9313      jumpifnot (treeop0, op0, -1);
9314      store_expr (treeop1, temp,
9315		  modifier == EXPAND_STACK_PARM,
9316		  false);
9317
9318      emit_jump_insn (gen_jump (op1));
9319      emit_barrier ();
9320      emit_label (op0);
9321      store_expr (treeop2, temp,
9322		  modifier == EXPAND_STACK_PARM,
9323		  false);
9324
9325      emit_label (op1);
9326      OK_DEFER_POP;
9327      return temp;
9328
9329    case VEC_COND_EXPR:
9330      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9331      return target;
9332
9333    default:
9334      gcc_unreachable ();
9335    }
9336
9337  /* Here to do an ordinary binary operator.  */
9338 binop:
9339  expand_operands (treeop0, treeop1,
9340		   subtarget, &op0, &op1, EXPAND_NORMAL);
9341 binop2:
9342  this_optab = optab_for_tree_code (code, type, optab_default);
9343 binop3:
9344  if (modifier == EXPAND_STACK_PARM)
9345    target = 0;
9346  temp = expand_binop (mode, this_optab, op0, op1, target,
9347		       unsignedp, OPTAB_LIB_WIDEN);
9348  gcc_assert (temp);
9349  /* Bitwise operations do not need bitfield reduction as we expect their
9350     operands being properly truncated.  */
9351  if (code == BIT_XOR_EXPR
9352      || code == BIT_AND_EXPR
9353      || code == BIT_IOR_EXPR)
9354    return temp;
9355  return REDUCE_BIT_FIELD (temp);
9356}
9357#undef REDUCE_BIT_FIELD
9358
9359
9360/* Return TRUE if expression STMT is suitable for replacement.
9361   Never consider memory loads as replaceable, because those don't ever lead
9362   into constant expressions.  */
9363
9364static bool
9365stmt_is_replaceable_p (gimple stmt)
9366{
9367  if (ssa_is_replaceable_p (stmt))
9368    {
9369      /* Don't move around loads.  */
9370      if (!gimple_assign_single_p (stmt)
9371	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
9372	return true;
9373    }
9374  return false;
9375}
9376
9377rtx
9378expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9379		    enum expand_modifier modifier, rtx *alt_rtl,
9380		    bool inner_reference_p)
9381{
9382  rtx op0, op1, temp, decl_rtl;
9383  tree type;
9384  int unsignedp;
9385  machine_mode mode;
9386  enum tree_code code = TREE_CODE (exp);
9387  rtx subtarget, original_target;
9388  int ignore;
9389  tree context;
9390  bool reduce_bit_field;
9391  location_t loc = EXPR_LOCATION (exp);
9392  struct separate_ops ops;
9393  tree treeop0, treeop1, treeop2;
9394  tree ssa_name = NULL_TREE;
9395  gimple g;
9396
9397  type = TREE_TYPE (exp);
9398  mode = TYPE_MODE (type);
9399  unsignedp = TYPE_UNSIGNED (type);
9400
9401  treeop0 = treeop1 = treeop2 = NULL_TREE;
9402  if (!VL_EXP_CLASS_P (exp))
9403    switch (TREE_CODE_LENGTH (code))
9404      {
9405	default:
9406	case 3: treeop2 = TREE_OPERAND (exp, 2);
9407	case 2: treeop1 = TREE_OPERAND (exp, 1);
9408	case 1: treeop0 = TREE_OPERAND (exp, 0);
9409	case 0: break;
9410      }
9411  ops.code = code;
9412  ops.type = type;
9413  ops.op0 = treeop0;
9414  ops.op1 = treeop1;
9415  ops.op2 = treeop2;
9416  ops.location = loc;
9417
9418  ignore = (target == const0_rtx
9419	    || ((CONVERT_EXPR_CODE_P (code)
9420		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9421		&& TREE_CODE (type) == VOID_TYPE));
9422
9423  /* An operation in what may be a bit-field type needs the
9424     result to be reduced to the precision of the bit-field type,
9425     which is narrower than that of the type's mode.  */
9426  reduce_bit_field = (!ignore
9427		      && INTEGRAL_TYPE_P (type)
9428		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9429
9430  /* If we are going to ignore this result, we need only do something
9431     if there is a side-effect somewhere in the expression.  If there
9432     is, short-circuit the most common cases here.  Note that we must
9433     not call expand_expr with anything but const0_rtx in case this
9434     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
9435
9436  if (ignore)
9437    {
9438      if (! TREE_SIDE_EFFECTS (exp))
9439	return const0_rtx;
9440
9441      /* Ensure we reference a volatile object even if value is ignored, but
9442	 don't do this if all we are doing is taking its address.  */
9443      if (TREE_THIS_VOLATILE (exp)
9444	  && TREE_CODE (exp) != FUNCTION_DECL
9445	  && mode != VOIDmode && mode != BLKmode
9446	  && modifier != EXPAND_CONST_ADDRESS)
9447	{
9448	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9449	  if (MEM_P (temp))
9450	    copy_to_reg (temp);
9451	  return const0_rtx;
9452	}
9453
9454      if (TREE_CODE_CLASS (code) == tcc_unary
9455	  || code == BIT_FIELD_REF
9456	  || code == COMPONENT_REF
9457	  || code == INDIRECT_REF)
9458	return expand_expr (treeop0, const0_rtx, VOIDmode,
9459			    modifier);
9460
9461      else if (TREE_CODE_CLASS (code) == tcc_binary
9462	       || TREE_CODE_CLASS (code) == tcc_comparison
9463	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9464	{
9465	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9466	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9467	  return const0_rtx;
9468	}
9469
9470      target = 0;
9471    }
9472
9473  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9474    target = 0;
9475
9476  /* Use subtarget as the target for operand 0 of a binary operation.  */
9477  subtarget = get_subtarget (target);
9478  original_target = target;
9479
9480  switch (code)
9481    {
9482    case LABEL_DECL:
9483      {
9484	tree function = decl_function_context (exp);
9485
9486	temp = label_rtx (exp);
9487	temp = gen_rtx_LABEL_REF (Pmode, temp);
9488
9489	if (function != current_function_decl
9490	    && function != 0)
9491	  LABEL_REF_NONLOCAL_P (temp) = 1;
9492
9493	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9494	return temp;
9495      }
9496
9497    case SSA_NAME:
9498      /* ??? ivopts calls expander, without any preparation from
9499         out-of-ssa.  So fake instructions as if this was an access to the
9500	 base variable.  This unnecessarily allocates a pseudo, see how we can
9501	 reuse it, if partition base vars have it set already.  */
9502      if (!currently_expanding_to_rtl)
9503	{
9504	  tree var = SSA_NAME_VAR (exp);
9505	  if (var && DECL_RTL_SET_P (var))
9506	    return DECL_RTL (var);
9507	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9508			      LAST_VIRTUAL_REGISTER + 1);
9509	}
9510
9511      g = get_gimple_for_ssa_name (exp);
9512      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
9513      if (g == NULL
9514	  && modifier == EXPAND_INITIALIZER
9515	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
9516	  && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9517	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9518	g = SSA_NAME_DEF_STMT (exp);
9519      if (g)
9520	{
9521	  rtx r;
9522	  ops.code = gimple_assign_rhs_code (g);
9523          switch (get_gimple_rhs_class (ops.code))
9524	    {
9525	    case GIMPLE_TERNARY_RHS:
9526	      ops.op2 = gimple_assign_rhs3 (g);
9527	      /* Fallthru */
9528	    case GIMPLE_BINARY_RHS:
9529	      ops.op1 = gimple_assign_rhs2 (g);
9530
9531	      /* Try to expand conditonal compare.  */
9532	      if (targetm.gen_ccmp_first)
9533		{
9534		  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9535		  r = expand_ccmp_expr (g);
9536		  if (r)
9537		    break;
9538		}
9539	      /* Fallthru */
9540	    case GIMPLE_UNARY_RHS:
9541	      ops.op0 = gimple_assign_rhs1 (g);
9542	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
9543	      ops.location = gimple_location (g);
9544	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
9545	      break;
9546	    case GIMPLE_SINGLE_RHS:
9547	      {
9548		location_t saved_loc = curr_insn_location ();
9549		set_curr_insn_location (gimple_location (g));
9550		r = expand_expr_real (gimple_assign_rhs1 (g), target,
9551				      tmode, modifier, NULL, inner_reference_p);
9552		set_curr_insn_location (saved_loc);
9553		break;
9554	      }
9555	    default:
9556	      gcc_unreachable ();
9557	    }
9558	  if (REG_P (r) && !REG_EXPR (r))
9559	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9560	  return r;
9561	}
9562
9563      ssa_name = exp;
9564      decl_rtl = get_rtx_for_ssa_name (ssa_name);
9565      exp = SSA_NAME_VAR (ssa_name);
9566      goto expand_decl_rtl;
9567
9568    case PARM_DECL:
9569    case VAR_DECL:
9570      /* If a static var's type was incomplete when the decl was written,
9571	 but the type is complete now, lay out the decl now.  */
9572      if (DECL_SIZE (exp) == 0
9573	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9574	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9575	layout_decl (exp, 0);
9576
9577      /* ... fall through ...  */
9578
9579    case FUNCTION_DECL:
9580    case RESULT_DECL:
9581      decl_rtl = DECL_RTL (exp);
9582    expand_decl_rtl:
9583      gcc_assert (decl_rtl);
9584      decl_rtl = copy_rtx (decl_rtl);
9585      /* Record writes to register variables.  */
9586      if (modifier == EXPAND_WRITE
9587	  && REG_P (decl_rtl)
9588	  && HARD_REGISTER_P (decl_rtl))
9589        add_to_hard_reg_set (&crtl->asm_clobbers,
9590			     GET_MODE (decl_rtl), REGNO (decl_rtl));
9591
9592      /* Ensure variable marked as used even if it doesn't go through
9593	 a parser.  If it hasn't be used yet, write out an external
9594	 definition.  */
9595      TREE_USED (exp) = 1;
9596
9597      /* Show we haven't gotten RTL for this yet.  */
9598      temp = 0;
9599
9600      /* Variables inherited from containing functions should have
9601	 been lowered by this point.  */
9602      context = decl_function_context (exp);
9603      gcc_assert (SCOPE_FILE_SCOPE_P (context)
9604		  || context == current_function_decl
9605		  || TREE_STATIC (exp)
9606		  || DECL_EXTERNAL (exp)
9607		  /* ??? C++ creates functions that are not TREE_STATIC.  */
9608		  || TREE_CODE (exp) == FUNCTION_DECL);
9609
9610      /* This is the case of an array whose size is to be determined
9611	 from its initializer, while the initializer is still being parsed.
9612	 ??? We aren't parsing while expanding anymore.  */
9613
9614      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9615	temp = validize_mem (decl_rtl);
9616
9617      /* If DECL_RTL is memory, we are in the normal case and the
9618	 address is not valid, get the address into a register.  */
9619
9620      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9621	{
9622	  if (alt_rtl)
9623	    *alt_rtl = decl_rtl;
9624	  decl_rtl = use_anchored_address (decl_rtl);
9625	  if (modifier != EXPAND_CONST_ADDRESS
9626	      && modifier != EXPAND_SUM
9627	      && !memory_address_addr_space_p (DECL_MODE (exp),
9628					       XEXP (decl_rtl, 0),
9629					       MEM_ADDR_SPACE (decl_rtl)))
9630	    temp = replace_equiv_address (decl_rtl,
9631					  copy_rtx (XEXP (decl_rtl, 0)));
9632	}
9633
9634      /* If we got something, return it.  But first, set the alignment
9635	 if the address is a register.  */
9636      if (temp != 0)
9637	{
9638	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9639	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9640
9641	  return temp;
9642	}
9643
9644      /* If the mode of DECL_RTL does not match that of the decl,
9645	 there are two cases: we are dealing with a BLKmode value
9646	 that is returned in a register, or we are dealing with
9647	 a promoted value.  In the latter case, return a SUBREG
9648	 of the wanted mode, but mark it so that we know that it
9649	 was already extended.  */
9650      if (REG_P (decl_rtl)
9651	  && DECL_MODE (exp) != BLKmode
9652	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
9653	{
9654	  machine_mode pmode;
9655
9656	  /* Get the signedness to be used for this variable.  Ensure we get
9657	     the same mode we got when the variable was declared.  */
9658	  if (code == SSA_NAME
9659	      && (g = SSA_NAME_DEF_STMT (ssa_name))
9660	      && gimple_code (g) == GIMPLE_CALL
9661	      && !gimple_call_internal_p (g))
9662	    pmode = promote_function_mode (type, mode, &unsignedp,
9663					   gimple_call_fntype (g),
9664					   2);
9665	  else
9666	    pmode = promote_decl_mode (exp, &unsignedp);
9667	  gcc_assert (GET_MODE (decl_rtl) == pmode);
9668
9669	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
9670	  SUBREG_PROMOTED_VAR_P (temp) = 1;
9671	  SUBREG_PROMOTED_SET (temp, unsignedp);
9672	  return temp;
9673	}
9674
9675      return decl_rtl;
9676
9677    case INTEGER_CST:
9678      /* Given that TYPE_PRECISION (type) is not always equal to
9679         GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9680         the former to the latter according to the signedness of the
9681         type. */
9682      temp = immed_wide_int_const (wide_int::from
9683				   (exp,
9684				    GET_MODE_PRECISION (TYPE_MODE (type)),
9685				    TYPE_SIGN (type)),
9686				   TYPE_MODE (type));
9687      return temp;
9688
9689    case VECTOR_CST:
9690      {
9691	tree tmp = NULL_TREE;
9692	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9693	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9694	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9695	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9696	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9697	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9698	  return const_vector_from_tree (exp);
9699	if (GET_MODE_CLASS (mode) == MODE_INT)
9700	  {
9701	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9702	    if (type_for_mode)
9703	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9704	  }
9705	if (!tmp)
9706	  {
9707	    vec<constructor_elt, va_gc> *v;
9708	    unsigned i;
9709	    vec_alloc (v, VECTOR_CST_NELTS (exp));
9710	    for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9711	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9712	    tmp = build_constructor (type, v);
9713	  }
9714	return expand_expr (tmp, ignore ? const0_rtx : target,
9715			    tmode, modifier);
9716      }
9717
9718    case CONST_DECL:
9719      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9720
9721    case REAL_CST:
9722      /* If optimized, generate immediate CONST_DOUBLE
9723	 which will be turned into memory by reload if necessary.
9724
9725	 We used to force a register so that loop.c could see it.  But
9726	 this does not allow gen_* patterns to perform optimizations with
9727	 the constants.  It also produces two insns in cases like "x = 1.0;".
9728	 On most machines, floating-point constants are not permitted in
9729	 many insns, so we'd end up copying it to a register in any case.
9730
9731	 Now, we do the copying in expand_binop, if appropriate.  */
9732      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9733					   TYPE_MODE (TREE_TYPE (exp)));
9734
9735    case FIXED_CST:
9736      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9737					   TYPE_MODE (TREE_TYPE (exp)));
9738
9739    case COMPLEX_CST:
9740      /* Handle evaluating a complex constant in a CONCAT target.  */
9741      if (original_target && GET_CODE (original_target) == CONCAT)
9742	{
9743	  machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9744	  rtx rtarg, itarg;
9745
9746	  rtarg = XEXP (original_target, 0);
9747	  itarg = XEXP (original_target, 1);
9748
9749	  /* Move the real and imaginary parts separately.  */
9750	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9751	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9752
9753	  if (op0 != rtarg)
9754	    emit_move_insn (rtarg, op0);
9755	  if (op1 != itarg)
9756	    emit_move_insn (itarg, op1);
9757
9758	  return original_target;
9759	}
9760
9761      /* ... fall through ...  */
9762
9763    case STRING_CST:
9764      temp = expand_expr_constant (exp, 1, modifier);
9765
9766      /* temp contains a constant address.
9767	 On RISC machines where a constant address isn't valid,
9768	 make some insns to get that address into a register.  */
9769      if (modifier != EXPAND_CONST_ADDRESS
9770	  && modifier != EXPAND_INITIALIZER
9771	  && modifier != EXPAND_SUM
9772	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9773					    MEM_ADDR_SPACE (temp)))
9774	return replace_equiv_address (temp,
9775				      copy_rtx (XEXP (temp, 0)));
9776      return temp;
9777
9778    case SAVE_EXPR:
9779      {
9780	tree val = treeop0;
9781	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9782				      inner_reference_p);
9783
9784	if (!SAVE_EXPR_RESOLVED_P (exp))
9785	  {
9786	    /* We can indeed still hit this case, typically via builtin
9787	       expanders calling save_expr immediately before expanding
9788	       something.  Assume this means that we only have to deal
9789	       with non-BLKmode values.  */
9790	    gcc_assert (GET_MODE (ret) != BLKmode);
9791
9792	    val = build_decl (curr_insn_location (),
9793			      VAR_DECL, NULL, TREE_TYPE (exp));
9794	    DECL_ARTIFICIAL (val) = 1;
9795	    DECL_IGNORED_P (val) = 1;
9796	    treeop0 = val;
9797	    TREE_OPERAND (exp, 0) = treeop0;
9798	    SAVE_EXPR_RESOLVED_P (exp) = 1;
9799
9800	    if (!CONSTANT_P (ret))
9801	      ret = copy_to_reg (ret);
9802	    SET_DECL_RTL (val, ret);
9803	  }
9804
9805        return ret;
9806      }
9807
9808
9809    case CONSTRUCTOR:
9810      /* If we don't need the result, just ensure we evaluate any
9811	 subexpressions.  */
9812      if (ignore)
9813	{
9814	  unsigned HOST_WIDE_INT idx;
9815	  tree value;
9816
9817	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9818	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9819
9820	  return const0_rtx;
9821	}
9822
9823      return expand_constructor (exp, target, modifier, false);
9824
9825    case TARGET_MEM_REF:
9826      {
9827	addr_space_t as
9828	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9829	enum insn_code icode;
9830	unsigned int align;
9831
9832	op0 = addr_for_mem_ref (exp, as, true);
9833	op0 = memory_address_addr_space (mode, op0, as);
9834	temp = gen_rtx_MEM (mode, op0);
9835	set_mem_attributes (temp, exp, 0);
9836	set_mem_addr_space (temp, as);
9837	align = get_object_alignment (exp);
9838	if (modifier != EXPAND_WRITE
9839	    && modifier != EXPAND_MEMORY
9840	    && mode != BLKmode
9841	    && align < GET_MODE_ALIGNMENT (mode)
9842	    /* If the target does not have special handling for unaligned
9843	       loads of mode then it can use regular moves for them.  */
9844	    && ((icode = optab_handler (movmisalign_optab, mode))
9845		!= CODE_FOR_nothing))
9846	  {
9847	    struct expand_operand ops[2];
9848
9849	    /* We've already validated the memory, and we're creating a
9850	       new pseudo destination.  The predicates really can't fail,
9851	       nor can the generator.  */
9852	    create_output_operand (&ops[0], NULL_RTX, mode);
9853	    create_fixed_operand (&ops[1], temp);
9854	    expand_insn (icode, 2, ops);
9855	    temp = ops[0].value;
9856	  }
9857	return temp;
9858      }
9859
9860    case MEM_REF:
9861      {
9862	addr_space_t as
9863	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9864	machine_mode address_mode;
9865	tree base = TREE_OPERAND (exp, 0);
9866	gimple def_stmt;
9867	enum insn_code icode;
9868	unsigned align;
9869	/* Handle expansion of non-aliased memory with non-BLKmode.  That
9870	   might end up in a register.  */
9871	if (mem_ref_refers_to_non_mem_p (exp))
9872	  {
9873	    HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9874	    base = TREE_OPERAND (base, 0);
9875	    if (offset == 0
9876		&& tree_fits_uhwi_p (TYPE_SIZE (type))
9877		&& (GET_MODE_BITSIZE (DECL_MODE (base))
9878		    == tree_to_uhwi (TYPE_SIZE (type))))
9879	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9880				  target, tmode, modifier);
9881	    if (TYPE_MODE (type) == BLKmode)
9882	      {
9883		temp = assign_stack_temp (DECL_MODE (base),
9884					  GET_MODE_SIZE (DECL_MODE (base)));
9885		store_expr (base, temp, 0, false);
9886		temp = adjust_address (temp, BLKmode, offset);
9887		set_mem_size (temp, int_size_in_bytes (type));
9888		return temp;
9889	      }
9890	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9891			  bitsize_int (offset * BITS_PER_UNIT));
9892	    return expand_expr (exp, target, tmode, modifier);
9893	  }
9894	address_mode = targetm.addr_space.address_mode (as);
9895	base = TREE_OPERAND (exp, 0);
9896	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9897	  {
9898	    tree mask = gimple_assign_rhs2 (def_stmt);
9899	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9900			   gimple_assign_rhs1 (def_stmt), mask);
9901	    TREE_OPERAND (exp, 0) = base;
9902	  }
9903	align = get_object_alignment (exp);
9904	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9905	op0 = memory_address_addr_space (mode, op0, as);
9906	if (!integer_zerop (TREE_OPERAND (exp, 1)))
9907	  {
9908	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9909	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9910	    op0 = memory_address_addr_space (mode, op0, as);
9911	  }
9912	temp = gen_rtx_MEM (mode, op0);
9913	set_mem_attributes (temp, exp, 0);
9914	set_mem_addr_space (temp, as);
9915	if (TREE_THIS_VOLATILE (exp))
9916	  MEM_VOLATILE_P (temp) = 1;
9917	if (modifier != EXPAND_WRITE
9918	    && modifier != EXPAND_MEMORY
9919	    && !inner_reference_p
9920	    && mode != BLKmode
9921	    && align < GET_MODE_ALIGNMENT (mode))
9922	  {
9923	    if ((icode = optab_handler (movmisalign_optab, mode))
9924		!= CODE_FOR_nothing)
9925	      {
9926		struct expand_operand ops[2];
9927
9928		/* We've already validated the memory, and we're creating a
9929		   new pseudo destination.  The predicates really can't fail,
9930		   nor can the generator.  */
9931		create_output_operand (&ops[0], NULL_RTX, mode);
9932		create_fixed_operand (&ops[1], temp);
9933		expand_insn (icode, 2, ops);
9934		temp = ops[0].value;
9935	      }
9936	    else if (SLOW_UNALIGNED_ACCESS (mode, align))
9937	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9938					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9939					(modifier == EXPAND_STACK_PARM
9940					 ? NULL_RTX : target),
9941					mode, mode);
9942	  }
9943	return temp;
9944      }
9945
9946    case ARRAY_REF:
9947
9948      {
9949	tree array = treeop0;
9950	tree index = treeop1;
9951	tree init;
9952
9953	/* Fold an expression like: "foo"[2].
9954	   This is not done in fold so it won't happen inside &.
9955	   Don't fold if this is for wide characters since it's too
9956	   difficult to do correctly and this is a very rare case.  */
9957
9958	if (modifier != EXPAND_CONST_ADDRESS
9959	    && modifier != EXPAND_INITIALIZER
9960	    && modifier != EXPAND_MEMORY)
9961	  {
9962	    tree t = fold_read_from_constant_string (exp);
9963
9964	    if (t)
9965	      return expand_expr (t, target, tmode, modifier);
9966	  }
9967
9968	/* If this is a constant index into a constant array,
9969	   just get the value from the array.  Handle both the cases when
9970	   we have an explicit constructor and when our operand is a variable
9971	   that was declared const.  */
9972
9973	if (modifier != EXPAND_CONST_ADDRESS
9974	    && modifier != EXPAND_INITIALIZER
9975	    && modifier != EXPAND_MEMORY
9976	    && TREE_CODE (array) == CONSTRUCTOR
9977	    && ! TREE_SIDE_EFFECTS (array)
9978	    && TREE_CODE (index) == INTEGER_CST)
9979	  {
9980	    unsigned HOST_WIDE_INT ix;
9981	    tree field, value;
9982
9983	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9984				      field, value)
9985	      if (tree_int_cst_equal (field, index))
9986		{
9987		  if (!TREE_SIDE_EFFECTS (value))
9988		    return expand_expr (fold (value), target, tmode, modifier);
9989		  break;
9990		}
9991	  }
9992
9993	else if (optimize >= 1
9994		 && modifier != EXPAND_CONST_ADDRESS
9995		 && modifier != EXPAND_INITIALIZER
9996		 && modifier != EXPAND_MEMORY
9997		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9998		 && TREE_CODE (index) == INTEGER_CST
9999		 && (TREE_CODE (array) == VAR_DECL
10000		     || TREE_CODE (array) == CONST_DECL)
10001		 && (init = ctor_for_folding (array)) != error_mark_node)
10002	  {
10003	    if (init == NULL_TREE)
10004	      {
10005		tree value = build_zero_cst (type);
10006		if (TREE_CODE (value) == CONSTRUCTOR)
10007		  {
10008		    /* If VALUE is a CONSTRUCTOR, this optimization is only
10009		       useful if this doesn't store the CONSTRUCTOR into
10010		       memory.  If it does, it is more efficient to just
10011		       load the data from the array directly.  */
10012		    rtx ret = expand_constructor (value, target,
10013						  modifier, true);
10014		    if (ret == NULL_RTX)
10015		      value = NULL_TREE;
10016		  }
10017
10018		if (value)
10019		  return expand_expr (value, target, tmode, modifier);
10020	      }
10021	    else if (TREE_CODE (init) == CONSTRUCTOR)
10022	      {
10023		unsigned HOST_WIDE_INT ix;
10024		tree field, value;
10025
10026		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10027					  field, value)
10028		  if (tree_int_cst_equal (field, index))
10029		    {
10030		      if (TREE_SIDE_EFFECTS (value))
10031			break;
10032
10033		      if (TREE_CODE (value) == CONSTRUCTOR)
10034			{
10035			  /* If VALUE is a CONSTRUCTOR, this
10036			     optimization is only useful if
10037			     this doesn't store the CONSTRUCTOR
10038			     into memory.  If it does, it is more
10039			     efficient to just load the data from
10040			     the array directly.  */
10041			  rtx ret = expand_constructor (value, target,
10042							modifier, true);
10043			  if (ret == NULL_RTX)
10044			    break;
10045			}
10046
10047		      return
10048		        expand_expr (fold (value), target, tmode, modifier);
10049		    }
10050	      }
10051	    else if (TREE_CODE (init) == STRING_CST)
10052	      {
10053		tree low_bound = array_ref_low_bound (exp);
10054		tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10055
10056		/* Optimize the special case of a zero lower bound.
10057
10058		   We convert the lower bound to sizetype to avoid problems
10059		   with constant folding.  E.g. suppose the lower bound is
10060		   1 and its mode is QI.  Without the conversion
10061		      (ARRAY + (INDEX - (unsigned char)1))
10062		   becomes
10063		      (ARRAY + (-(unsigned char)1) + INDEX)
10064		   which becomes
10065		      (ARRAY + 255 + INDEX).  Oops!  */
10066		if (!integer_zerop (low_bound))
10067		  index1 = size_diffop_loc (loc, index1,
10068					    fold_convert_loc (loc, sizetype,
10069							      low_bound));
10070
10071		if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10072		  {
10073		    tree type = TREE_TYPE (TREE_TYPE (init));
10074		    machine_mode mode = TYPE_MODE (type);
10075
10076		    if (GET_MODE_CLASS (mode) == MODE_INT
10077			&& GET_MODE_SIZE (mode) == 1)
10078		      return gen_int_mode (TREE_STRING_POINTER (init)
10079					   [TREE_INT_CST_LOW (index1)],
10080					   mode);
10081		  }
10082	      }
10083	  }
10084      }
10085      goto normal_inner_ref;
10086
10087    case COMPONENT_REF:
10088      /* If the operand is a CONSTRUCTOR, we can just extract the
10089	 appropriate field if it is present.  */
10090      if (TREE_CODE (treeop0) == CONSTRUCTOR)
10091	{
10092	  unsigned HOST_WIDE_INT idx;
10093	  tree field, value;
10094
10095	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10096				    idx, field, value)
10097	    if (field == treeop1
10098		/* We can normally use the value of the field in the
10099		   CONSTRUCTOR.  However, if this is a bitfield in
10100		   an integral mode that we can fit in a HOST_WIDE_INT,
10101		   we must mask only the number of bits in the bitfield,
10102		   since this is done implicitly by the constructor.  If
10103		   the bitfield does not meet either of those conditions,
10104		   we can't do this optimization.  */
10105		&& (! DECL_BIT_FIELD (field)
10106		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10107			&& (GET_MODE_PRECISION (DECL_MODE (field))
10108			    <= HOST_BITS_PER_WIDE_INT))))
10109	      {
10110		if (DECL_BIT_FIELD (field)
10111		    && modifier == EXPAND_STACK_PARM)
10112		  target = 0;
10113		op0 = expand_expr (value, target, tmode, modifier);
10114		if (DECL_BIT_FIELD (field))
10115		  {
10116		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10117		    machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10118
10119		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
10120		      {
10121			op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10122					    imode);
10123			op0 = expand_and (imode, op0, op1, target);
10124		      }
10125		    else
10126		      {
10127			int count = GET_MODE_PRECISION (imode) - bitsize;
10128
10129			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10130					    target, 0);
10131			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10132					    target, 0);
10133		      }
10134		  }
10135
10136		return op0;
10137	      }
10138	}
10139      goto normal_inner_ref;
10140
10141    case BIT_FIELD_REF:
10142    case ARRAY_RANGE_REF:
10143    normal_inner_ref:
10144      {
10145	machine_mode mode1, mode2;
10146	HOST_WIDE_INT bitsize, bitpos;
10147	tree offset;
10148	int volatilep = 0, must_force_mem;
10149	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10150					&mode1, &unsignedp, &volatilep, true);
10151	rtx orig_op0, memloc;
10152	bool clear_mem_expr = false;
10153
10154	/* If we got back the original object, something is wrong.  Perhaps
10155	   we are evaluating an expression too early.  In any event, don't
10156	   infinitely recurse.  */
10157	gcc_assert (tem != exp);
10158
10159	/* If TEM's type is a union of variable size, pass TARGET to the inner
10160	   computation, since it will need a temporary and TARGET is known
10161	   to have to do.  This occurs in unchecked conversion in Ada.  */
10162	orig_op0 = op0
10163	  = expand_expr_real (tem,
10164			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10165			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
10166			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10167				   != INTEGER_CST)
10168			       && modifier != EXPAND_STACK_PARM
10169			       ? target : NULL_RTX),
10170			      VOIDmode,
10171			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10172			      NULL, true);
10173
10174	/* If the field has a mode, we want to access it in the
10175	   field's mode, not the computed mode.
10176	   If a MEM has VOIDmode (external with incomplete type),
10177	   use BLKmode for it instead.  */
10178	if (MEM_P (op0))
10179	  {
10180	    if (mode1 != VOIDmode)
10181	      op0 = adjust_address (op0, mode1, 0);
10182	    else if (GET_MODE (op0) == VOIDmode)
10183	      op0 = adjust_address (op0, BLKmode, 0);
10184	  }
10185
10186	mode2
10187	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10188
10189	/* If we have either an offset, a BLKmode result, or a reference
10190	   outside the underlying object, we must force it to memory.
10191	   Such a case can occur in Ada if we have unchecked conversion
10192	   of an expression from a scalar type to an aggregate type or
10193	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10194	   passed a partially uninitialized object or a view-conversion
10195	   to a larger size.  */
10196	must_force_mem = (offset
10197			  || mode1 == BLKmode
10198			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10199
10200	/* Handle CONCAT first.  */
10201	if (GET_CODE (op0) == CONCAT && !must_force_mem)
10202	  {
10203	    if (bitpos == 0
10204		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10205	      return op0;
10206	    if (bitpos == 0
10207		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10208		&& bitsize)
10209	      {
10210		op0 = XEXP (op0, 0);
10211		mode2 = GET_MODE (op0);
10212	      }
10213	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10214		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10215		     && bitpos
10216		     && bitsize)
10217	      {
10218		op0 = XEXP (op0, 1);
10219		bitpos = 0;
10220		mode2 = GET_MODE (op0);
10221	      }
10222	    else
10223	      /* Otherwise force into memory.  */
10224	      must_force_mem = 1;
10225	  }
10226
10227	/* If this is a constant, put it in a register if it is a legitimate
10228	   constant and we don't need a memory reference.  */
10229	if (CONSTANT_P (op0)
10230	    && mode2 != BLKmode
10231	    && targetm.legitimate_constant_p (mode2, op0)
10232	    && !must_force_mem)
10233	  op0 = force_reg (mode2, op0);
10234
10235	/* Otherwise, if this is a constant, try to force it to the constant
10236	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
10237	   is a legitimate constant.  */
10238	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10239	  op0 = validize_mem (memloc);
10240
10241	/* Otherwise, if this is a constant or the object is not in memory
10242	   and need be, put it there.  */
10243	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10244	  {
10245	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10246	    emit_move_insn (memloc, op0);
10247	    op0 = memloc;
10248	    clear_mem_expr = true;
10249	  }
10250
10251	if (offset)
10252	  {
10253	    machine_mode address_mode;
10254	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10255					  EXPAND_SUM);
10256
10257	    gcc_assert (MEM_P (op0));
10258
10259	    address_mode = get_address_mode (op0);
10260	    if (GET_MODE (offset_rtx) != address_mode)
10261	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10262
10263	    /* See the comment in expand_assignment for the rationale.  */
10264	    if (mode1 != VOIDmode
10265		&& bitpos != 0
10266		&& bitsize > 0
10267		&& (bitpos % bitsize) == 0
10268		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10269		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10270	      {
10271		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10272		bitpos = 0;
10273	      }
10274
10275	    op0 = offset_address (op0, offset_rtx,
10276				  highest_pow2_factor (offset));
10277	  }
10278
10279	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10280	   record its alignment as BIGGEST_ALIGNMENT.  */
10281	if (MEM_P (op0) && bitpos == 0 && offset != 0
10282	    && is_aligning_offset (offset, tem))
10283	  set_mem_align (op0, BIGGEST_ALIGNMENT);
10284
10285	/* Don't forget about volatility even if this is a bitfield.  */
10286	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10287	  {
10288	    if (op0 == orig_op0)
10289	      op0 = copy_rtx (op0);
10290
10291	    MEM_VOLATILE_P (op0) = 1;
10292	  }
10293
10294	/* In cases where an aligned union has an unaligned object
10295	   as a field, we might be extracting a BLKmode value from
10296	   an integer-mode (e.g., SImode) object.  Handle this case
10297	   by doing the extract into an object as wide as the field
10298	   (which we know to be the width of a basic mode), then
10299	   storing into memory, and changing the mode to BLKmode.  */
10300	if (mode1 == VOIDmode
10301	    || REG_P (op0) || GET_CODE (op0) == SUBREG
10302	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
10303		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10304		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10305		&& modifier != EXPAND_CONST_ADDRESS
10306		&& modifier != EXPAND_INITIALIZER
10307		&& modifier != EXPAND_MEMORY)
10308	    /* If the bitfield is volatile and the bitsize
10309	       is narrower than the access size of the bitfield,
10310	       we need to extract bitfields from the access.  */
10311	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10312		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10313		&& mode1 != BLKmode
10314		&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10315	    /* If the field isn't aligned enough to fetch as a memref,
10316	       fetch it as a bit field.  */
10317	    || (mode1 != BLKmode
10318		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10319		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10320		      || (MEM_P (op0)
10321			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10322			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10323		     && modifier != EXPAND_MEMORY
10324		     && ((modifier == EXPAND_CONST_ADDRESS
10325			  || modifier == EXPAND_INITIALIZER)
10326			 ? STRICT_ALIGNMENT
10327			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10328		    || (bitpos % BITS_PER_UNIT != 0)))
10329	    /* If the type and the field are a constant size and the
10330	       size of the type isn't the same size as the bitfield,
10331	       we must use bitfield operations.  */
10332	    || (bitsize >= 0
10333		&& TYPE_SIZE (TREE_TYPE (exp))
10334		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10335		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10336					  bitsize)))
10337	  {
10338	    machine_mode ext_mode = mode;
10339
10340	    if (ext_mode == BLKmode
10341		&& ! (target != 0 && MEM_P (op0)
10342		      && MEM_P (target)
10343		      && bitpos % BITS_PER_UNIT == 0))
10344	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10345
10346	    if (ext_mode == BLKmode)
10347	      {
10348		if (target == 0)
10349		  target = assign_temp (type, 1, 1);
10350
10351		/* ??? Unlike the similar test a few lines below, this one is
10352		   very likely obsolete.  */
10353		if (bitsize == 0)
10354		  return target;
10355
10356		/* In this case, BITPOS must start at a byte boundary and
10357		   TARGET, if specified, must be a MEM.  */
10358		gcc_assert (MEM_P (op0)
10359			    && (!target || MEM_P (target))
10360			    && !(bitpos % BITS_PER_UNIT));
10361
10362		emit_block_move (target,
10363				 adjust_address (op0, VOIDmode,
10364						 bitpos / BITS_PER_UNIT),
10365				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10366					  / BITS_PER_UNIT),
10367				 (modifier == EXPAND_STACK_PARM
10368				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10369
10370		return target;
10371	      }
10372
10373	    /* If we have nothing to extract, the result will be 0 for targets
10374	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
10375	       return 0 for the sake of consistency, as reading a zero-sized
10376	       bitfield is valid in Ada and the value is fully specified.  */
10377	    if (bitsize == 0)
10378	      return const0_rtx;
10379
10380	    op0 = validize_mem (op0);
10381
10382	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10383	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10384
10385	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10386				     (modifier == EXPAND_STACK_PARM
10387				      ? NULL_RTX : target),
10388				     ext_mode, ext_mode);
10389
10390	    /* If the result is a record type and BITSIZE is narrower than
10391	       the mode of OP0, an integral mode, and this is a big endian
10392	       machine, we must put the field into the high-order bits.  */
10393	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10394		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10395		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10396	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10397				  GET_MODE_BITSIZE (GET_MODE (op0))
10398				  - bitsize, op0, 1);
10399
10400	    /* If the result type is BLKmode, store the data into a temporary
10401	       of the appropriate type, but with the mode corresponding to the
10402	       mode for the data we have (op0's mode).  */
10403	    if (mode == BLKmode)
10404	      {
10405		rtx new_rtx
10406		  = assign_stack_temp_for_type (ext_mode,
10407						GET_MODE_BITSIZE (ext_mode),
10408						type);
10409		emit_move_insn (new_rtx, op0);
10410		op0 = copy_rtx (new_rtx);
10411		PUT_MODE (op0, BLKmode);
10412	      }
10413
10414	    return op0;
10415	  }
10416
10417	/* If the result is BLKmode, use that to access the object
10418	   now as well.  */
10419	if (mode == BLKmode)
10420	  mode1 = BLKmode;
10421
10422	/* Get a reference to just this component.  */
10423	if (modifier == EXPAND_CONST_ADDRESS
10424	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10425	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10426	else
10427	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10428
10429	if (op0 == orig_op0)
10430	  op0 = copy_rtx (op0);
10431
10432	set_mem_attributes (op0, exp, 0);
10433
10434	if (REG_P (XEXP (op0, 0)))
10435	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10436
10437	/* If op0 is a temporary because the original expressions was forced
10438	   to memory, clear MEM_EXPR so that the original expression cannot
10439	   be marked as addressable through MEM_EXPR of the temporary.  */
10440	if (clear_mem_expr)
10441	  set_mem_expr (op0, NULL_TREE);
10442
10443	MEM_VOLATILE_P (op0) |= volatilep;
10444	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10445	    || modifier == EXPAND_CONST_ADDRESS
10446	    || modifier == EXPAND_INITIALIZER)
10447	  return op0;
10448
10449	if (target == 0)
10450	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10451
10452	convert_move (target, op0, unsignedp);
10453	return target;
10454      }
10455
10456    case OBJ_TYPE_REF:
10457      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10458
10459    case CALL_EXPR:
10460      /* All valid uses of __builtin_va_arg_pack () are removed during
10461	 inlining.  */
10462      if (CALL_EXPR_VA_ARG_PACK (exp))
10463	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10464      {
10465	tree fndecl = get_callee_fndecl (exp), attr;
10466
10467	if (fndecl
10468	    && (attr = lookup_attribute ("error",
10469					 DECL_ATTRIBUTES (fndecl))) != NULL)
10470	  error ("%Kcall to %qs declared with attribute error: %s",
10471		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10472		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10473	if (fndecl
10474	    && (attr = lookup_attribute ("warning",
10475					 DECL_ATTRIBUTES (fndecl))) != NULL)
10476	  warning_at (tree_nonartificial_location (exp),
10477		      0, "%Kcall to %qs declared with attribute warning: %s",
10478		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10479		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10480
10481	/* Check for a built-in function.  */
10482	if (fndecl && DECL_BUILT_IN (fndecl))
10483	  {
10484	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10485	    if (CALL_WITH_BOUNDS_P (exp))
10486	      return expand_builtin_with_bounds (exp, target, subtarget,
10487						 tmode, ignore);
10488	    else
10489	      return expand_builtin (exp, target, subtarget, tmode, ignore);
10490	  }
10491      }
10492      return expand_call (exp, target, ignore);
10493
10494    case VIEW_CONVERT_EXPR:
10495      op0 = NULL_RTX;
10496
10497      /* If we are converting to BLKmode, try to avoid an intermediate
10498	 temporary by fetching an inner memory reference.  */
10499      if (mode == BLKmode
10500	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10501	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10502	  && handled_component_p (treeop0))
10503      {
10504	machine_mode mode1;
10505	HOST_WIDE_INT bitsize, bitpos;
10506	tree offset;
10507	int unsignedp;
10508	int volatilep = 0;
10509	tree tem
10510	  = get_inner_reference (treeop0, &bitsize, &bitpos,
10511				 &offset, &mode1, &unsignedp, &volatilep,
10512				 true);
10513	rtx orig_op0;
10514
10515	/* ??? We should work harder and deal with non-zero offsets.  */
10516	if (!offset
10517	    && (bitpos % BITS_PER_UNIT) == 0
10518	    && bitsize >= 0
10519	    && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10520	  {
10521	    /* See the normal_inner_ref case for the rationale.  */
10522	    orig_op0
10523	      = expand_expr_real (tem,
10524				  (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10525				   && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10526				       != INTEGER_CST)
10527				   && modifier != EXPAND_STACK_PARM
10528				   ? target : NULL_RTX),
10529				  VOIDmode,
10530				  modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10531				  NULL, true);
10532
10533	    if (MEM_P (orig_op0))
10534	      {
10535		op0 = orig_op0;
10536
10537		/* Get a reference to just this component.  */
10538		if (modifier == EXPAND_CONST_ADDRESS
10539		    || modifier == EXPAND_SUM
10540		    || modifier == EXPAND_INITIALIZER)
10541		  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10542		else
10543		  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10544
10545		if (op0 == orig_op0)
10546		  op0 = copy_rtx (op0);
10547
10548		set_mem_attributes (op0, treeop0, 0);
10549		if (REG_P (XEXP (op0, 0)))
10550		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10551
10552		MEM_VOLATILE_P (op0) |= volatilep;
10553	      }
10554	  }
10555      }
10556
10557      if (!op0)
10558	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10559				NULL, inner_reference_p);
10560
10561      /* If the input and output modes are both the same, we are done.  */
10562      if (mode == GET_MODE (op0))
10563	;
10564      /* If neither mode is BLKmode, and both modes are the same size
10565	 then we can use gen_lowpart.  */
10566      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10567	       && (GET_MODE_PRECISION (mode)
10568		   == GET_MODE_PRECISION (GET_MODE (op0)))
10569	       && !COMPLEX_MODE_P (GET_MODE (op0)))
10570	{
10571	  if (GET_CODE (op0) == SUBREG)
10572	    op0 = force_reg (GET_MODE (op0), op0);
10573	  temp = gen_lowpart_common (mode, op0);
10574	  if (temp)
10575	    op0 = temp;
10576	  else
10577	    {
10578	      if (!REG_P (op0) && !MEM_P (op0))
10579		op0 = force_reg (GET_MODE (op0), op0);
10580	      op0 = gen_lowpart (mode, op0);
10581	    }
10582	}
10583      /* If both types are integral, convert from one mode to the other.  */
10584      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10585	op0 = convert_modes (mode, GET_MODE (op0), op0,
10586			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10587      /* If the output type is a bit-field type, do an extraction.  */
10588      else if (reduce_bit_field)
10589	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10590				  TYPE_UNSIGNED (type), NULL_RTX,
10591				  mode, mode);
10592      /* As a last resort, spill op0 to memory, and reload it in a
10593	 different mode.  */
10594      else if (!MEM_P (op0))
10595	{
10596	  /* If the operand is not a MEM, force it into memory.  Since we
10597	     are going to be changing the mode of the MEM, don't call
10598	     force_const_mem for constants because we don't allow pool
10599	     constants to change mode.  */
10600	  tree inner_type = TREE_TYPE (treeop0);
10601
10602	  gcc_assert (!TREE_ADDRESSABLE (exp));
10603
10604	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10605	    target
10606	      = assign_stack_temp_for_type
10607		(TYPE_MODE (inner_type),
10608		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10609
10610	  emit_move_insn (target, op0);
10611	  op0 = target;
10612	}
10613
10614      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
10615	 output type is such that the operand is known to be aligned, indicate
10616	 that it is.  Otherwise, we need only be concerned about alignment for
10617	 non-BLKmode results.  */
10618      if (MEM_P (op0))
10619	{
10620	  enum insn_code icode;
10621
10622	  if (TYPE_ALIGN_OK (type))
10623	    {
10624	      /* ??? Copying the MEM without substantially changing it might
10625		 run afoul of the code handling volatile memory references in
10626		 store_expr, which assumes that TARGET is returned unmodified
10627		 if it has been used.  */
10628	      op0 = copy_rtx (op0);
10629	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10630	    }
10631	  else if (modifier != EXPAND_WRITE
10632		   && modifier != EXPAND_MEMORY
10633		   && !inner_reference_p
10634		   && mode != BLKmode
10635		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10636	    {
10637	      /* If the target does have special handling for unaligned
10638		 loads of mode then use them.  */
10639	      if ((icode = optab_handler (movmisalign_optab, mode))
10640		  != CODE_FOR_nothing)
10641		{
10642		  rtx reg, insn;
10643
10644		  op0 = adjust_address (op0, mode, 0);
10645		  /* We've already validated the memory, and we're creating a
10646		     new pseudo destination.  The predicates really can't
10647		     fail.  */
10648		  reg = gen_reg_rtx (mode);
10649
10650		  /* Nor can the insn generator.  */
10651		  insn = GEN_FCN (icode) (reg, op0);
10652		  emit_insn (insn);
10653		  return reg;
10654		}
10655	      else if (STRICT_ALIGNMENT)
10656		{
10657		  tree inner_type = TREE_TYPE (treeop0);
10658		  HOST_WIDE_INT temp_size
10659		    = MAX (int_size_in_bytes (inner_type),
10660			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10661		  rtx new_rtx
10662		    = assign_stack_temp_for_type (mode, temp_size, type);
10663		  rtx new_with_op0_mode
10664		    = adjust_address (new_rtx, GET_MODE (op0), 0);
10665
10666		  gcc_assert (!TREE_ADDRESSABLE (exp));
10667
10668		  if (GET_MODE (op0) == BLKmode)
10669		    emit_block_move (new_with_op0_mode, op0,
10670				     GEN_INT (GET_MODE_SIZE (mode)),
10671				     (modifier == EXPAND_STACK_PARM
10672				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10673		  else
10674		    emit_move_insn (new_with_op0_mode, op0);
10675
10676		  op0 = new_rtx;
10677		}
10678	    }
10679
10680	  op0 = adjust_address (op0, mode, 0);
10681	}
10682
10683      return op0;
10684
10685    case MODIFY_EXPR:
10686      {
10687	tree lhs = treeop0;
10688	tree rhs = treeop1;
10689	gcc_assert (ignore);
10690
10691	/* Check for |= or &= of a bitfield of size one into another bitfield
10692	   of size 1.  In this case, (unless we need the result of the
10693	   assignment) we can do this more efficiently with a
10694	   test followed by an assignment, if necessary.
10695
10696	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
10697	   things change so we do, this code should be enhanced to
10698	   support it.  */
10699	if (TREE_CODE (lhs) == COMPONENT_REF
10700	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
10701		|| TREE_CODE (rhs) == BIT_AND_EXPR)
10702	    && TREE_OPERAND (rhs, 0) == lhs
10703	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10704	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10705	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10706	  {
10707	    rtx_code_label *label = gen_label_rtx ();
10708	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10709	    do_jump (TREE_OPERAND (rhs, 1),
10710		     value ? label : 0,
10711		     value ? 0 : label, -1);
10712	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10713			       false);
10714	    do_pending_stack_adjust ();
10715	    emit_label (label);
10716	    return const0_rtx;
10717	  }
10718
10719	expand_assignment (lhs, rhs, false);
10720	return const0_rtx;
10721      }
10722
10723    case ADDR_EXPR:
10724      return expand_expr_addr_expr (exp, target, tmode, modifier);
10725
10726    case REALPART_EXPR:
10727      op0 = expand_normal (treeop0);
10728      return read_complex_part (op0, false);
10729
10730    case IMAGPART_EXPR:
10731      op0 = expand_normal (treeop0);
10732      return read_complex_part (op0, true);
10733
10734    case RETURN_EXPR:
10735    case LABEL_EXPR:
10736    case GOTO_EXPR:
10737    case SWITCH_EXPR:
10738    case ASM_EXPR:
10739      /* Expanded in cfgexpand.c.  */
10740      gcc_unreachable ();
10741
10742    case TRY_CATCH_EXPR:
10743    case CATCH_EXPR:
10744    case EH_FILTER_EXPR:
10745    case TRY_FINALLY_EXPR:
10746      /* Lowered by tree-eh.c.  */
10747      gcc_unreachable ();
10748
10749    case WITH_CLEANUP_EXPR:
10750    case CLEANUP_POINT_EXPR:
10751    case TARGET_EXPR:
10752    case CASE_LABEL_EXPR:
10753    case VA_ARG_EXPR:
10754    case BIND_EXPR:
10755    case INIT_EXPR:
10756    case CONJ_EXPR:
10757    case COMPOUND_EXPR:
10758    case PREINCREMENT_EXPR:
10759    case PREDECREMENT_EXPR:
10760    case POSTINCREMENT_EXPR:
10761    case POSTDECREMENT_EXPR:
10762    case LOOP_EXPR:
10763    case EXIT_EXPR:
10764    case COMPOUND_LITERAL_EXPR:
10765      /* Lowered by gimplify.c.  */
10766      gcc_unreachable ();
10767
10768    case FDESC_EXPR:
10769      /* Function descriptors are not valid except for as
10770	 initialization constants, and should not be expanded.  */
10771      gcc_unreachable ();
10772
10773    case WITH_SIZE_EXPR:
10774      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
10775	 have pulled out the size to use in whatever context it needed.  */
10776      return expand_expr_real (treeop0, original_target, tmode,
10777			       modifier, alt_rtl, inner_reference_p);
10778
10779    default:
10780      return expand_expr_real_2 (&ops, target, tmode, modifier);
10781    }
10782}
10783
10784/* Subroutine of above: reduce EXP to the precision of TYPE (in the
10785   signedness of TYPE), possibly returning the result in TARGET.  */
10786static rtx
10787reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10788{
10789  HOST_WIDE_INT prec = TYPE_PRECISION (type);
10790  if (target && GET_MODE (target) != GET_MODE (exp))
10791    target = 0;
10792  /* For constant values, reduce using build_int_cst_type. */
10793  if (CONST_INT_P (exp))
10794    {
10795      HOST_WIDE_INT value = INTVAL (exp);
10796      tree t = build_int_cst_type (type, value);
10797      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10798    }
10799  else if (TYPE_UNSIGNED (type))
10800    {
10801      machine_mode mode = GET_MODE (exp);
10802      rtx mask = immed_wide_int_const
10803	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10804      return expand_and (mode, exp, mask, target);
10805    }
10806  else
10807    {
10808      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10809      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10810			  exp, count, target, 0);
10811      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10812			   exp, count, target, 0);
10813    }
10814}
10815
10816/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10817   when applied to the address of EXP produces an address known to be
10818   aligned more than BIGGEST_ALIGNMENT.  */
10819
10820static int
10821is_aligning_offset (const_tree offset, const_tree exp)
10822{
10823  /* Strip off any conversions.  */
10824  while (CONVERT_EXPR_P (offset))
10825    offset = TREE_OPERAND (offset, 0);
10826
10827  /* We must now have a BIT_AND_EXPR with a constant that is one less than
10828     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
10829  if (TREE_CODE (offset) != BIT_AND_EXPR
10830      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10831      || compare_tree_int (TREE_OPERAND (offset, 1),
10832			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10833      || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10834    return 0;
10835
10836  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10837     It must be NEGATE_EXPR.  Then strip any more conversions.  */
10838  offset = TREE_OPERAND (offset, 0);
10839  while (CONVERT_EXPR_P (offset))
10840    offset = TREE_OPERAND (offset, 0);
10841
10842  if (TREE_CODE (offset) != NEGATE_EXPR)
10843    return 0;
10844
10845  offset = TREE_OPERAND (offset, 0);
10846  while (CONVERT_EXPR_P (offset))
10847    offset = TREE_OPERAND (offset, 0);
10848
10849  /* This must now be the address of EXP.  */
10850  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10851}
10852
10853/* Return the tree node if an ARG corresponds to a string constant or zero
10854   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
10855   in bytes within the string that ARG is accessing.  The type of the
10856   offset will be `sizetype'.  */
10857
10858tree
10859string_constant (tree arg, tree *ptr_offset)
10860{
10861  tree array, offset, lower_bound;
10862  STRIP_NOPS (arg);
10863
10864  if (TREE_CODE (arg) == ADDR_EXPR)
10865    {
10866      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10867	{
10868	  *ptr_offset = size_zero_node;
10869	  return TREE_OPERAND (arg, 0);
10870	}
10871      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10872	{
10873	  array = TREE_OPERAND (arg, 0);
10874	  offset = size_zero_node;
10875	}
10876      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10877	{
10878	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10879	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10880	  if (TREE_CODE (array) != STRING_CST
10881	      && TREE_CODE (array) != VAR_DECL)
10882	    return 0;
10883
10884	  /* Check if the array has a nonzero lower bound.  */
10885	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10886	  if (!integer_zerop (lower_bound))
10887	    {
10888	      /* If the offset and base aren't both constants, return 0.  */
10889	      if (TREE_CODE (lower_bound) != INTEGER_CST)
10890	        return 0;
10891	      if (TREE_CODE (offset) != INTEGER_CST)
10892		return 0;
10893	      /* Adjust offset by the lower bound.  */
10894	      offset = size_diffop (fold_convert (sizetype, offset),
10895				    fold_convert (sizetype, lower_bound));
10896	    }
10897	}
10898      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10899	{
10900	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10901	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10902	  if (TREE_CODE (array) != ADDR_EXPR)
10903	    return 0;
10904	  array = TREE_OPERAND (array, 0);
10905	  if (TREE_CODE (array) != STRING_CST
10906	      && TREE_CODE (array) != VAR_DECL)
10907	    return 0;
10908	}
10909      else
10910	return 0;
10911    }
10912  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10913    {
10914      tree arg0 = TREE_OPERAND (arg, 0);
10915      tree arg1 = TREE_OPERAND (arg, 1);
10916
10917      STRIP_NOPS (arg0);
10918      STRIP_NOPS (arg1);
10919
10920      if (TREE_CODE (arg0) == ADDR_EXPR
10921	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10922	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10923	{
10924	  array = TREE_OPERAND (arg0, 0);
10925	  offset = arg1;
10926	}
10927      else if (TREE_CODE (arg1) == ADDR_EXPR
10928	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10929		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10930	{
10931	  array = TREE_OPERAND (arg1, 0);
10932	  offset = arg0;
10933	}
10934      else
10935	return 0;
10936    }
10937  else
10938    return 0;
10939
10940  if (TREE_CODE (array) == STRING_CST)
10941    {
10942      *ptr_offset = fold_convert (sizetype, offset);
10943      return array;
10944    }
10945  else if (TREE_CODE (array) == VAR_DECL
10946	   || TREE_CODE (array) == CONST_DECL)
10947    {
10948      int length;
10949      tree init = ctor_for_folding (array);
10950
10951      /* Variables initialized to string literals can be handled too.  */
10952      if (init == error_mark_node
10953	  || !init
10954	  || TREE_CODE (init) != STRING_CST)
10955	return 0;
10956
10957      /* Avoid const char foo[4] = "abcde";  */
10958      if (DECL_SIZE_UNIT (array) == NULL_TREE
10959	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10960	  || (length = TREE_STRING_LENGTH (init)) <= 0
10961	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10962	return 0;
10963
10964      /* If variable is bigger than the string literal, OFFSET must be constant
10965	 and inside of the bounds of the string literal.  */
10966      offset = fold_convert (sizetype, offset);
10967      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10968	  && (! tree_fits_uhwi_p (offset)
10969	      || compare_tree_int (offset, length) >= 0))
10970	return 0;
10971
10972      *ptr_offset = offset;
10973      return init;
10974    }
10975
10976  return 0;
10977}
10978
10979/* Generate code to calculate OPS, and exploded expression
10980   using a store-flag instruction and return an rtx for the result.
10981   OPS reflects a comparison.
10982
10983   If TARGET is nonzero, store the result there if convenient.
10984
10985   Return zero if there is no suitable set-flag instruction
10986   available on this machine.
10987
10988   Once expand_expr has been called on the arguments of the comparison,
10989   we are committed to doing the store flag, since it is not safe to
10990   re-evaluate the expression.  We emit the store-flag insn by calling
10991   emit_store_flag, but only expand the arguments if we have a reason
10992   to believe that emit_store_flag will be successful.  If we think that
10993   it will, but it isn't, we have to simulate the store-flag with a
10994   set/jump/set sequence.  */
10995
10996static rtx
10997do_store_flag (sepops ops, rtx target, machine_mode mode)
10998{
10999  enum rtx_code code;
11000  tree arg0, arg1, type;
11001  tree tem;
11002  machine_mode operand_mode;
11003  int unsignedp;
11004  rtx op0, op1;
11005  rtx subtarget = target;
11006  location_t loc = ops->location;
11007
11008  arg0 = ops->op0;
11009  arg1 = ops->op1;
11010
11011  /* Don't crash if the comparison was erroneous.  */
11012  if (arg0 == error_mark_node || arg1 == error_mark_node)
11013    return const0_rtx;
11014
11015  type = TREE_TYPE (arg0);
11016  operand_mode = TYPE_MODE (type);
11017  unsignedp = TYPE_UNSIGNED (type);
11018
11019  /* We won't bother with BLKmode store-flag operations because it would mean
11020     passing a lot of information to emit_store_flag.  */
11021  if (operand_mode == BLKmode)
11022    return 0;
11023
11024  /* We won't bother with store-flag operations involving function pointers
11025     when function pointers must be canonicalized before comparisons.  */
11026#ifdef HAVE_canonicalize_funcptr_for_compare
11027  if (HAVE_canonicalize_funcptr_for_compare
11028      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11029	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11030	       == FUNCTION_TYPE))
11031	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11032	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11033		  == FUNCTION_TYPE))))
11034    return 0;
11035#endif
11036
11037  STRIP_NOPS (arg0);
11038  STRIP_NOPS (arg1);
11039
11040  /* For vector typed comparisons emit code to generate the desired
11041     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
11042     expander for this.  */
11043  if (TREE_CODE (ops->type) == VECTOR_TYPE)
11044    {
11045      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11046      tree if_true = constant_boolean_node (true, ops->type);
11047      tree if_false = constant_boolean_node (false, ops->type);
11048      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
11049    }
11050
11051  /* Get the rtx comparison code to use.  We know that EXP is a comparison
11052     operation of some type.  Some comparisons against 1 and -1 can be
11053     converted to comparisons with zero.  Do so here so that the tests
11054     below will be aware that we have a comparison with zero.   These
11055     tests will not catch constants in the first operand, but constants
11056     are rarely passed as the first operand.  */
11057
11058  switch (ops->code)
11059    {
11060    case EQ_EXPR:
11061      code = EQ;
11062      break;
11063    case NE_EXPR:
11064      code = NE;
11065      break;
11066    case LT_EXPR:
11067      if (integer_onep (arg1))
11068	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11069      else
11070	code = unsignedp ? LTU : LT;
11071      break;
11072    case LE_EXPR:
11073      if (! unsignedp && integer_all_onesp (arg1))
11074	arg1 = integer_zero_node, code = LT;
11075      else
11076	code = unsignedp ? LEU : LE;
11077      break;
11078    case GT_EXPR:
11079      if (! unsignedp && integer_all_onesp (arg1))
11080	arg1 = integer_zero_node, code = GE;
11081      else
11082	code = unsignedp ? GTU : GT;
11083      break;
11084    case GE_EXPR:
11085      if (integer_onep (arg1))
11086	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11087      else
11088	code = unsignedp ? GEU : GE;
11089      break;
11090
11091    case UNORDERED_EXPR:
11092      code = UNORDERED;
11093      break;
11094    case ORDERED_EXPR:
11095      code = ORDERED;
11096      break;
11097    case UNLT_EXPR:
11098      code = UNLT;
11099      break;
11100    case UNLE_EXPR:
11101      code = UNLE;
11102      break;
11103    case UNGT_EXPR:
11104      code = UNGT;
11105      break;
11106    case UNGE_EXPR:
11107      code = UNGE;
11108      break;
11109    case UNEQ_EXPR:
11110      code = UNEQ;
11111      break;
11112    case LTGT_EXPR:
11113      code = LTGT;
11114      break;
11115
11116    default:
11117      gcc_unreachable ();
11118    }
11119
11120  /* Put a constant second.  */
11121  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11122      || TREE_CODE (arg0) == FIXED_CST)
11123    {
11124      tem = arg0; arg0 = arg1; arg1 = tem;
11125      code = swap_condition (code);
11126    }
11127
11128  /* If this is an equality or inequality test of a single bit, we can
11129     do this by shifting the bit being tested to the low-order bit and
11130     masking the result with the constant 1.  If the condition was EQ,
11131     we xor it with 1.  This does not require an scc insn and is faster
11132     than an scc insn even if we have it.
11133
11134     The code to make this transformation was moved into fold_single_bit_test,
11135     so we just call into the folder and expand its result.  */
11136
11137  if ((code == NE || code == EQ)
11138      && integer_zerop (arg1)
11139      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11140    {
11141      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11142      if (srcstmt
11143	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11144	{
11145	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11146	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11147	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11148				       gimple_assign_rhs1 (srcstmt),
11149				       gimple_assign_rhs2 (srcstmt));
11150	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11151	  if (temp)
11152	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11153	}
11154    }
11155
11156  if (! get_subtarget (target)
11157      || GET_MODE (subtarget) != operand_mode)
11158    subtarget = 0;
11159
11160  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11161
11162  if (target == 0)
11163    target = gen_reg_rtx (mode);
11164
11165  /* Try a cstore if possible.  */
11166  return emit_store_flag_force (target, code, op0, op1,
11167				operand_mode, unsignedp,
11168				(TYPE_PRECISION (ops->type) == 1
11169				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11170}
11171
11172
11173/* Stubs in case we haven't got a casesi insn.  */
11174#ifndef HAVE_casesi
11175# define HAVE_casesi 0
11176# define gen_casesi(a, b, c, d, e) (0)
11177# define CODE_FOR_casesi CODE_FOR_nothing
11178#endif
11179
11180/* Attempt to generate a casesi instruction.  Returns 1 if successful,
11181   0 otherwise (i.e. if there is no casesi instruction).
11182
11183   DEFAULT_PROBABILITY is the probability of jumping to the default
11184   label.  */
11185int
11186try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11187	    rtx table_label, rtx default_label, rtx fallback_label,
11188            int default_probability)
11189{
11190  struct expand_operand ops[5];
11191  machine_mode index_mode = SImode;
11192  rtx op1, op2, index;
11193
11194  if (! HAVE_casesi)
11195    return 0;
11196
11197  /* Convert the index to SImode.  */
11198  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11199    {
11200      machine_mode omode = TYPE_MODE (index_type);
11201      rtx rangertx = expand_normal (range);
11202
11203      /* We must handle the endpoints in the original mode.  */
11204      index_expr = build2 (MINUS_EXPR, index_type,
11205			   index_expr, minval);
11206      minval = integer_zero_node;
11207      index = expand_normal (index_expr);
11208      if (default_label)
11209        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11210				 omode, 1, default_label,
11211                                 default_probability);
11212      /* Now we can safely truncate.  */
11213      index = convert_to_mode (index_mode, index, 0);
11214    }
11215  else
11216    {
11217      if (TYPE_MODE (index_type) != index_mode)
11218	{
11219	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11220	  index_expr = fold_convert (index_type, index_expr);
11221	}
11222
11223      index = expand_normal (index_expr);
11224    }
11225
11226  do_pending_stack_adjust ();
11227
11228  op1 = expand_normal (minval);
11229  op2 = expand_normal (range);
11230
11231  create_input_operand (&ops[0], index, index_mode);
11232  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11233  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11234  create_fixed_operand (&ops[3], table_label);
11235  create_fixed_operand (&ops[4], (default_label
11236				  ? default_label
11237				  : fallback_label));
11238  expand_jump_insn (CODE_FOR_casesi, 5, ops);
11239  return 1;
11240}
11241
11242/* Attempt to generate a tablejump instruction; same concept.  */
11243#ifndef HAVE_tablejump
11244#define HAVE_tablejump 0
11245#define gen_tablejump(x, y) (0)
11246#endif
11247
11248/* Subroutine of the next function.
11249
11250   INDEX is the value being switched on, with the lowest value
11251   in the table already subtracted.
11252   MODE is its expected mode (needed if INDEX is constant).
11253   RANGE is the length of the jump table.
11254   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11255
11256   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11257   index value is out of range.
11258   DEFAULT_PROBABILITY is the probability of jumping to
11259   the default label.  */
11260
11261static void
11262do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11263	      rtx default_label, int default_probability)
11264{
11265  rtx temp, vector;
11266
11267  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11268    cfun->cfg->max_jumptable_ents = INTVAL (range);
11269
11270  /* Do an unsigned comparison (in the proper mode) between the index
11271     expression and the value which represents the length of the range.
11272     Since we just finished subtracting the lower bound of the range
11273     from the index expression, this comparison allows us to simultaneously
11274     check that the original index expression value is both greater than
11275     or equal to the minimum value of the range and less than or equal to
11276     the maximum value of the range.  */
11277
11278  if (default_label)
11279    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11280			     default_label, default_probability);
11281
11282
11283  /* If index is in range, it must fit in Pmode.
11284     Convert to Pmode so we can index with it.  */
11285  if (mode != Pmode)
11286    index = convert_to_mode (Pmode, index, 1);
11287
11288  /* Don't let a MEM slip through, because then INDEX that comes
11289     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11290     and break_out_memory_refs will go to work on it and mess it up.  */
11291#ifdef PIC_CASE_VECTOR_ADDRESS
11292  if (flag_pic && !REG_P (index))
11293    index = copy_to_mode_reg (Pmode, index);
11294#endif
11295
11296  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11297     GET_MODE_SIZE, because this indicates how large insns are.  The other
11298     uses should all be Pmode, because they are addresses.  This code
11299     could fail if addresses and insns are not the same size.  */
11300  index = simplify_gen_binary (MULT, Pmode, index,
11301			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11302					     Pmode));
11303  index = simplify_gen_binary (PLUS, Pmode, index,
11304			       gen_rtx_LABEL_REF (Pmode, table_label));
11305
11306#ifdef PIC_CASE_VECTOR_ADDRESS
11307  if (flag_pic)
11308    index = PIC_CASE_VECTOR_ADDRESS (index);
11309  else
11310#endif
11311    index = memory_address (CASE_VECTOR_MODE, index);
11312  temp = gen_reg_rtx (CASE_VECTOR_MODE);
11313  vector = gen_const_mem (CASE_VECTOR_MODE, index);
11314  convert_move (temp, vector, 0);
11315
11316  emit_jump_insn (gen_tablejump (temp, table_label));
11317
11318  /* If we are generating PIC code or if the table is PC-relative, the
11319     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11320  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11321    emit_barrier ();
11322}
11323
11324int
11325try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11326	       rtx table_label, rtx default_label, int default_probability)
11327{
11328  rtx index;
11329
11330  if (! HAVE_tablejump)
11331    return 0;
11332
11333  index_expr = fold_build2 (MINUS_EXPR, index_type,
11334			    fold_convert (index_type, index_expr),
11335			    fold_convert (index_type, minval));
11336  index = expand_normal (index_expr);
11337  do_pending_stack_adjust ();
11338
11339  do_tablejump (index, TYPE_MODE (index_type),
11340		convert_modes (TYPE_MODE (index_type),
11341			       TYPE_MODE (TREE_TYPE (range)),
11342			       expand_normal (range),
11343			       TYPE_UNSIGNED (TREE_TYPE (range))),
11344		table_label, default_label, default_probability);
11345  return 1;
11346}
11347
11348/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
11349static rtx
11350const_vector_from_tree (tree exp)
11351{
11352  rtvec v;
11353  unsigned i;
11354  int units;
11355  tree elt;
11356  machine_mode inner, mode;
11357
11358  mode = TYPE_MODE (TREE_TYPE (exp));
11359
11360  if (initializer_zerop (exp))
11361    return CONST0_RTX (mode);
11362
11363  units = GET_MODE_NUNITS (mode);
11364  inner = GET_MODE_INNER (mode);
11365
11366  v = rtvec_alloc (units);
11367
11368  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11369    {
11370      elt = VECTOR_CST_ELT (exp, i);
11371
11372      if (TREE_CODE (elt) == REAL_CST)
11373	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11374							 inner);
11375      else if (TREE_CODE (elt) == FIXED_CST)
11376	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11377							 inner);
11378      else
11379	RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11380    }
11381
11382  return gen_rtx_CONST_VECTOR (mode, v);
11383}
11384
11385/* Build a decl for a personality function given a language prefix.  */
11386
11387tree
11388build_personality_function (const char *lang)
11389{
11390  const char *unwind_and_version;
11391  tree decl, type;
11392  char *name;
11393
11394  switch (targetm_common.except_unwind_info (&global_options))
11395    {
11396    case UI_NONE:
11397      return NULL;
11398    case UI_SJLJ:
11399      unwind_and_version = "_sj0";
11400      break;
11401    case UI_DWARF2:
11402    case UI_TARGET:
11403      unwind_and_version = "_v0";
11404      break;
11405    case UI_SEH:
11406      unwind_and_version = "_seh0";
11407      break;
11408    default:
11409      gcc_unreachable ();
11410    }
11411
11412  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11413
11414  type = build_function_type_list (integer_type_node, integer_type_node,
11415				   long_long_unsigned_type_node,
11416				   ptr_type_node, ptr_type_node, NULL_TREE);
11417  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11418		     get_identifier (name), type);
11419  DECL_ARTIFICIAL (decl) = 1;
11420  DECL_EXTERNAL (decl) = 1;
11421  TREE_PUBLIC (decl) = 1;
11422
11423  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
11424     are the flags assigned by targetm.encode_section_info.  */
11425  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11426
11427  return decl;
11428}
11429
11430/* Extracts the personality function of DECL and returns the corresponding
11431   libfunc.  */
11432
11433rtx
11434get_personality_function (tree decl)
11435{
11436  tree personality = DECL_FUNCTION_PERSONALITY (decl);
11437  enum eh_personality_kind pk;
11438
11439  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11440  if (pk == eh_personality_none)
11441    return NULL;
11442
11443  if (!personality
11444      && pk == eh_personality_any)
11445    personality = lang_hooks.eh_personality ();
11446
11447  if (pk == eh_personality_lang)
11448    gcc_assert (personality != NULL_TREE);
11449
11450  return XEXP (DECL_RTL (personality), 0);
11451}
11452
11453/* Returns a tree for the size of EXP in bytes.  */
11454
11455static tree
11456tree_expr_size (const_tree exp)
11457{
11458  if (DECL_P (exp)
11459      && DECL_SIZE_UNIT (exp) != 0)
11460    return DECL_SIZE_UNIT (exp);
11461  else
11462    return size_in_bytes (TREE_TYPE (exp));
11463}
11464
11465/* Return an rtx for the size in bytes of the value of EXP.  */
11466
11467rtx
11468expr_size (tree exp)
11469{
11470  tree size;
11471
11472  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11473    size = TREE_OPERAND (exp, 1);
11474  else
11475    {
11476      size = tree_expr_size (exp);
11477      gcc_assert (size);
11478      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11479    }
11480
11481  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11482}
11483
11484/* Return a wide integer for the size in bytes of the value of EXP, or -1
11485   if the size can vary or is larger than an integer.  */
11486
11487static HOST_WIDE_INT
11488int_expr_size (tree exp)
11489{
11490  tree size;
11491
11492  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11493    size = TREE_OPERAND (exp, 1);
11494  else
11495    {
11496      size = tree_expr_size (exp);
11497      gcc_assert (size);
11498    }
11499
11500  if (size == 0 || !tree_fits_shwi_p (size))
11501    return -1;
11502
11503  return tree_to_shwi (size);
11504}
11505
11506#include "gt-expr.h"
11507