1/* Subroutines used for code generation on Renesas RX processors.
2   Copyright (C) 2008-2015 Free Software Foundation, Inc.
3   Contributed by Red Hat.
4
5   This file is part of GCC.
6
7   GCC is free software; you can redistribute it and/or modify
8   it under the terms of the GNU General Public License as published by
9   the Free Software Foundation; either version 3, or (at your option)
10   any later version.
11
12   GCC is distributed in the hope that it will be useful,
13   but WITHOUT ANY WARRANTY; without even the implied warranty of
14   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15   GNU General Public License for more details.
16
17   You should have received a copy of the GNU General Public License
18   along with GCC; see the file COPYING3.  If not see
19   <http://www.gnu.org/licenses/>.  */
20
21/* To Do:
22
23 * Re-enable memory-to-memory copies and fix up reload.  */
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
28#include "tm.h"
29#include "hash-set.h"
30#include "machmode.h"
31#include "vec.h"
32#include "double-int.h"
33#include "input.h"
34#include "alias.h"
35#include "symtab.h"
36#include "wide-int.h"
37#include "inchash.h"
38#include "tree.h"
39#include "varasm.h"
40#include "stor-layout.h"
41#include "calls.h"
42#include "rtl.h"
43#include "regs.h"
44#include "hard-reg-set.h"
45#include "insn-config.h"
46#include "conditions.h"
47#include "output.h"
48#include "insn-attr.h"
49#include "flags.h"
50#include "function.h"
51#include "hashtab.h"
52#include "statistics.h"
53#include "real.h"
54#include "fixed-value.h"
55#include "expmed.h"
56#include "dojump.h"
57#include "explow.h"
58#include "emit-rtl.h"
59#include "stmt.h"
60#include "expr.h"
61#include "insn-codes.h"
62#include "optabs.h"
63#include "libfuncs.h"
64#include "recog.h"
65#include "diagnostic-core.h"
66#include "toplev.h"
67#include "reload.h"
68#include "dominance.h"
69#include "cfg.h"
70#include "cfgrtl.h"
71#include "cfganal.h"
72#include "lcm.h"
73#include "cfgbuild.h"
74#include "cfgcleanup.h"
75#include "predict.h"
76#include "basic-block.h"
77#include "df.h"
78#include "ggc.h"
79#include "tm_p.h"
80#include "debug.h"
81#include "target.h"
82#include "target-def.h"
83#include "langhooks.h"
84#include "opts.h"
85#include "hash-map.h"
86#include "is-a.h"
87#include "plugin-api.h"
88#include "ipa-ref.h"
89#include "cgraph.h"
90#include "builtins.h"
91
/* Register numbers used as the small-data (GP) and PID base registers.
   INVALID_REGNUM until initialized -- presumably during option
   processing, which is not visible in this part of the file.  */
static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
/* Number of registers reserved for interrupt handlers.  Set elsewhere;
   not referenced in this part of the file.  */
static unsigned int rx_num_interrupt_regs;
95
96static unsigned int
97rx_gp_base_regnum (void)
98{
99  if (rx_gp_base_regnum_val == INVALID_REGNUM)
100    gcc_unreachable ();
101  return rx_gp_base_regnum_val;
102}
103
104static unsigned int
105rx_pid_base_regnum (void)
106{
107  if (rx_pid_base_regnum_val == INVALID_REGNUM)
108    gcc_unreachable ();
109  return rx_pid_base_regnum_val;
110}
111
112/* Find a SYMBOL_REF in a "standard" MEM address and return its decl.  */
113
114static tree
115rx_decl_for_addr (rtx op)
116{
117  if (GET_CODE (op) == MEM)
118    op = XEXP (op, 0);
119  if (GET_CODE (op) == CONST)
120    op = XEXP (op, 0);
121  while (GET_CODE (op) == PLUS)
122    op = XEXP (op, 0);
123  if (GET_CODE (op) == SYMBOL_REF)
124    return SYMBOL_REF_DECL (op);
125  return NULL_TREE;
126}
127
/* Forward declaration; defined later in this file.  */
static void rx_print_operand (FILE *, rtx, int);

/* Bit-flags describing which condition-code bits (Sign, Zero,
   Overflow, Carry) a given comparison mode makes available.  */
#define CC_FLAG_S	(1 << 0)
#define CC_FLAG_Z	(1 << 1)
#define CC_FLAG_O	(1 << 2)
#define CC_FLAG_C	(1 << 3)
#define CC_FLAG_FP	(1 << 4)	/* Fake, to differentiate CC_Fmode.  */

/* Forward declarations; definitions are not in this part of the file.  */
static unsigned int flags_from_mode (machine_mode mode);
static unsigned int flags_from_code (enum rtx_code code);
138
/* Return true if OP is a reference to an object in a PID data area.  */
/* (The comment above describes rx_pid_data_operand, which follows
   this enumeration of its possible results.)  */

enum pid_type
{
  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
  PID_ENCODED,		/* The object is in the PID data area.  */
  PID_UNENCODED		/* The object will be placed in the PID data area, but it has not been placed there yet.  */
};
147
/* Classify operand OP for PID addressing.  Returns PID_ENCODED when
   OP is already a pid-relative address, PID_UNENCODED when OP refers
   to read-only data (or a bare symbol/label) that still needs the PID
   base register applied, and PID_NOT_PID otherwise.  */

static enum pid_type
rx_pid_data_operand (rtx op)
{
  tree op_decl;

  if (!TARGET_PID)
    return PID_NOT_PID;

  /* (REG + (const (unspec ...))) is the shape produced by
     gen_pid_addr, i.e. an already-encoded PID address.  */
  if (GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == REG
      && GET_CODE (XEXP (op, 1)) == CONST
      && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
    return PID_ENCODED;

  op_decl = rx_decl_for_addr (op);

  if (op_decl)
    {
      /* Read-only objects live in the PID data area.  */
      if (TREE_READONLY (op_decl))
	return PID_UNENCODED;
    }
  else
    {
      /* Sigh, some special cases.  */
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return PID_UNENCODED;
    }

  return PID_NOT_PID;
}
179
180static rtx
181rx_legitimize_address (rtx x,
182		       rtx oldx ATTRIBUTE_UNUSED,
183		       machine_mode mode ATTRIBUTE_UNUSED)
184{
185  if (rx_pid_data_operand (x) == PID_UNENCODED)
186    {
187      rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
188      return rv;
189    }
190
191  if (GET_CODE (x) == PLUS
192      && GET_CODE (XEXP (x, 0)) == PLUS
193      && REG_P (XEXP (XEXP (x, 0), 0))
194      && REG_P (XEXP (x, 1)))
195    return force_reg (SImode, x);
196
197  return x;
198}
199
200/* Return true if OP is a reference to an object in a small data area.  */
201
202static bool
203rx_small_data_operand (rtx op)
204{
205  if (rx_small_data_limit == 0)
206    return false;
207
208  if (GET_CODE (op) == SYMBOL_REF)
209    return SYMBOL_REF_SMALL_P (op);
210
211  return false;
212}
213
/* Return true if X is a valid address for an access of mode MODE.
   NOTE(review): presumably the TARGET_LEGITIMATE_ADDRESS_P hook --
   confirm against the target macro table elsewhere in this file.  */

static bool
rx_is_legitimate_address (machine_mode mode, rtx x,
			  bool strict ATTRIBUTE_UNUSED)
{
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  /* Fully encoded PID addresses are legitimate; addresses that still
     need the PID base register applied are not.  */
  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      /* One operand must be a valid base register; the other is the
	 index, classified below.  */
      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
	index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
	index = arg1;
      else
	return false;

      switch (GET_CODE (index))
	{
	case CONST_INT:
	  {
	    /* Register Relative: REG + INT.
	       Only positive, mode-aligned, mode-sized
	       displacements are allowed.  */
	    HOST_WIDE_INT val = INTVAL (index);
	    int factor;

	    if (val < 0)
	      return false;

	    /* Modes larger than 4 bytes are scaled as 4-byte
	       accesses (the default: label falls into case 4).  */
	    switch (GET_MODE_SIZE (mode))
	      {
	      default:
	      case 4: factor = 4; break;
	      case 2: factor = 2; break;
	      case 1: factor = 1; break;
	      }

	    /* Displacements are limited to 65535 scaled units.  */
	    if (val > (65535 * factor))
	      return false;
	    return (val % factor) == 0;
	  }

	case REG:
	  /* Unscaled Indexed Register Indirect: REG + REG
	     Size has to be "QI", REG has to be valid.  */
	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

	case MULT:
	  {
	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
	       Factor has to equal the mode size, REG has to be valid.  */
	    rtx factor;

	    factor = XEXP (index, 1);
	    index = XEXP (index, 0);

	    return REG_P (index)
	      && RTX_OK_FOR_BASE (index, strict)
	      && CONST_INT_P (factor)
	      && GET_MODE_SIZE (mode) == INTVAL (factor);
	  }

	default:
	  return false;
	}
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}
307
/* Returns TRUE for simple memory addresses, i.e. ones
   that do not involve register indexed addressing
   or pre/post increment/decrement.  */

bool
rx_is_restricted_memory_address (rtx mem, machine_mode mode)
{
  /* MEM here is the address itself; it must at least be a legitimate
     address for MODE.  */
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case PRE_DEC:
    case POST_INC:
      return false;

    case PLUS:
      {
	rtx base, index;

	/* Only allow REG+INT addressing.  */
	base = XEXP (mem, 0);
	index = XEXP (mem, 1);

	if (! RX_REG_P (base) || ! CONST_INT_P (index))
	  return false;

	/* Displacement must fit in 16 bits of mode-sized units.  */
	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
         Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      /* Anything else should have been rejected by
	 rx_is_legitimate_address above.  */
      gcc_unreachable ();
    }
}
352
/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  Return true if the
   validity of address ADDR depends on the mode of the access.  */

static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  /* Look through a CONST wrapper.  */
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case MINUS:
    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
	return true;

      /* Classify the second operand of the sum/difference.  */
      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
	{
	case REG:
	  /* REG+REG only works in SImode.  */
	  return true;

	case CONST_INT:
	  /* REG+INT is only mode independent if INT is a
	     multiple of 4, positive and will fit into 16-bits.  */
	  if (((INTVAL (addr) & 3) == 0)
	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
	    return false;
	  return true;

	case SYMBOL_REF:
	case LABEL_REF:
	  return true;

	case MULT:
	  gcc_assert (REG_P (XEXP (addr, 0)));
	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
	  /* REG+REG*SCALE is always mode dependent.  */
	  return true;

	default:
	  /* Not recognized, so treat as mode dependent.  */
	  return true;
	}

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
	 so treat as mode dependent.  */
      return true;
    }
}
417
/* A C compound statement to output to stdio stream FILE the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  */

static void
rx_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* Register indirect: [Rn].  */
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      /* Pre-decrement: [-Rn].  */
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      /* Post-increment: [Rn+].  */
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
	rtx arg1 = XEXP (addr, 0);
	rtx arg2 = XEXP (addr, 1);
	rtx base, index;

	/* Work out which operand is the base register; the other is
	   the index (register, scaled register, or displacement).  */
	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
	  base = arg1, index = arg2;
	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
	  base = arg2, index = arg1;
	else
	  {
	    /* Neither side is a base register; emit a symbolic sum.  */
	    rx_print_operand (file, arg1, 0);
	    fprintf (file, " + ");
	    rx_print_operand (file, arg2, 0);
	    break;
	  }

	if (REG_P (index) || GET_CODE (index) == MULT)
	  {
	    /* Indexed: [index, base].  */
	    fprintf (file, "[");
	    rx_print_operand (file, index, 'A');
	    fprintf (file, ",");
	  }
	else /* GET_CODE (index) == CONST_INT  */
	  {
	    /* Register relative: disp[base].  */
	    rx_print_operand (file, index, 'A');
	    fprintf (file, "[");
	  }
	rx_print_operand (file, base, 0);
	fprintf (file, "]");
	break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

	  /* FIXME: Putting this case label here is an appalling abuse of the C language.  */
	  /* A (const (unspec ...)) falls into this code from the `if'
	     above; a bare UNSPEC jumps straight to the label below.
	     Either way the wrapped CONST_INT is extracted.  */
	case UNSPEC:
          addr = XVECEXP (addr, 0, 0);
	  gcc_assert (CONST_INT_P (addr));
	}
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}
500
501static void
502rx_print_integer (FILE * file, HOST_WIDE_INT val)
503{
504  if (IN_RANGE (val, -64, 64))
505    fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
506  else
507    fprintf (file,
508	     TARGET_AS100_SYNTAX
509	     ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
510	     val);
511}
512
513static bool
514rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
515{
516  const char *  op = integer_asm_op (size, is_aligned);
517
518  if (! CONST_INT_P (x))
519    return default_assemble_integer (x, size, is_aligned);
520
521  if (op == NULL)
522    return false;
523  fputs (op, asm_out_file);
524
525  rx_print_integer (asm_out_file, INTVAL (x));
526  fputc ('\n', asm_out_file);
527  return true;
528}
529
530
/* Handles the insertion of a single operand into the assembler output.
   The %<letter> directives supported are:

     %A  Print an operand without a leading # character.
     %B  Print an integer comparison name.
     %C  Print a control register name.
     %F  Print a condition code flag name.
     %G  Register used for small-data-area addressing
     %H  Print high part of a DImode register, integer or address.
     %L  Print low part of a DImode register, integer or address.
     %N  Print the negation of the immediate value.
     %P  Register used for PID addressing
     %Q  If the operand is a MEM, then correctly generate
         register indirect or register relative addressing.
     %R  Like %Q but for zero-extending loads.  */

static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  /* %A applied to an UNSPEC (or const-wrapped UNSPEC) operand: let
     the generic code below handle it, but suppress the '#'.  */
  if (letter == 'A'
      && ((GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
	  || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
	op = XEXP (op, 0);

      switch (GET_CODE (op))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  output_addr_const (file, op);
	  break;
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (op));
	  break;
	default:
	  rx_print_operand (file, op, 0);
	  break;
	}
      break;

    case 'B':
      /* Print the condition name for comparison OP, choosing the
	 mnemonic according to the flags provided by the mode of the
	 comparison.  */
      {
	enum rtx_code code = GET_CODE (op);
	machine_mode mode = GET_MODE (XEXP (op, 0));
	const char *ret;

	if (mode == CC_Fmode)
	  {
	    /* C flag is undefined, and O flag carries unordered.  None of the
	       branch combinations that include O use it helpfully.  */
	    switch (code)
	      {
	      case ORDERED:
		ret = "no";
		break;
	      case UNORDERED:
		ret = "o";
		break;
	      case LT:
		ret = "n";
		break;
	      case GE:
		ret = "pz";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	else
	  {
	    unsigned int flags = flags_from_mode (mode);

	    switch (code)
	      {
	      case LT:
		/* Without a valid O flag, fall back to the N flag.  */
		ret = (flags & CC_FLAG_O ? "lt" : "n");
		break;
	      case GE:
		ret = (flags & CC_FLAG_O ? "ge" : "pz");
		break;
	      case GT:
		ret = "gt";
		break;
	      case LE:
		ret = "le";
		break;
	      case GEU:
		ret = "geu";
		break;
	      case LTU:
		ret = "ltu";
		break;
	      case GTU:
		ret = "gtu";
		break;
	      case LEU:
		ret = "leu";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	    /* The mode must supply every flag the condition needs.  */
	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
	  }
	fputs (ret, file);
	break;
      }

    case 'C':
      /* Print a control register name from its encoding.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0:   fprintf (file, "psw"); break;
	case 2:   fprintf (file, "usp"); break;
	case 3:   fprintf (file, "fpsw"); break;
	case 4:   fprintf (file, "cpen"); break;
	case 8:   fprintf (file, "bpsw"); break;
	case 9:   fprintf (file, "bpc"); break;
	case 0xa: fprintf (file, "isp"); break;
	case 0xb: fprintf (file, "fintv"); break;
	case 0xc: fprintf (file, "intb"); break;
	default:
	  warning (0, "unrecognized control register number: %d - using 'psw'",
		   (int) INTVAL (op));
	  fprintf (file, "psw");
	  break;
	}
      break;

    case 'F':
      /* Print a condition code flag name; accepts either the flag's
	 number or its (upper- or lower-case) letter.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0: case 'c': case 'C': fprintf (file, "C"); break;
	case 1:	case 'z': case 'Z': fprintf (file, "Z"); break;
	case 2: case 's': case 'S': fprintf (file, "S"); break;
	case 3: case 'o': case 'O': fprintf (file, "O"); break;
	case 8: case 'i': case 'I': fprintf (file, "I"); break;
	case 9: case 'u': case 'U': fprintf (file, "U"); break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'G':
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      /* High part of a DImode value: the second register, the upper
	 32 bits of a constant, or the upper word of a memory slot
	 (endianness decides which half is "high").  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
	  break;
	case CONST_INT:
	  {
	    HOST_WIDE_INT v = INTVAL (op);

	    fprintf (file, "#");
	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
	    v = v >> 16;
	    v = v >> 16;
	    rx_print_integer (file, v);
	    break;
	  }
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
	  break;
	case MEM:
	  if (! WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'L':
      /* Low part of a DImode value; mirror image of %H above.  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
	  break;
	case CONST_INT:
	  fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
	  break;
	case MEM:
	  if (WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'N':
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      /* Memory operand: emit "disp[reg].SIZE", where SIZE is B/W/L
	 (U-prefixed for %R's zero-extending loads).  Non-MEM operands
	 fall through to the generic handling below.  */
      if (MEM_P (op))
	{
	  HOST_WIDE_INT offset;
	  rtx mem = op;

	  op = XEXP (op, 0);

	  if (REG_P (op))
	    offset = 0;
	  else if (GET_CODE (op) == PLUS)
	    {
	      rtx displacement;

	      if (REG_P (XEXP (op, 0)))
		{
		  displacement = XEXP (op, 1);
		  op = XEXP (op, 0);
		}
	      else
		{
		  displacement = XEXP (op, 0);
		  op = XEXP (op, 1);
		  gcc_assert (REG_P (op));
		}

	      gcc_assert (CONST_INT_P (displacement));
	      offset = INTVAL (displacement);
	      gcc_assert (offset >= 0);

	      /* NOTE(review): "%ld" assumes HOST_WIDE_INT fits in a
		 long; HOST_WIDE_INT_PRINT_DEC would be safer on
		 LLP64 hosts -- confirm.  */
	      fprintf (file, "%ld", offset);
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "[");
	  rx_print_operand (file, op, 0);
	  fprintf (file, "].");

	  /* Append the size suffix, checking that the displacement is
	     aligned and within range for that access size.  */
	  switch (GET_MODE_SIZE (GET_MODE (mem)))
	    {
	    case 1:
	      gcc_assert (offset <= 65535 * 1);
	      fprintf (file, unsigned_load ? "UB" : "B");
	      break;
	    case 2:
	      gcc_assert (offset % 2 == 0);
	      gcc_assert (offset <= 65535 * 2);
	      fprintf (file, unsigned_load ? "UW" : "W");
	      break;
	    case 4:
	      gcc_assert (offset % 4 == 0);
	      gcc_assert (offset <= 65535 * 4);
	      fprintf (file, "L");
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	}

      /* Fall through.  */

    default:
      /* Unwrap (const (unspec ...)) and handle
	 (const (plus (unspec ...) (const_int ...))) specially.  */
      if (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
	op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
	{
	  if (print_hash)
	    fprintf (file, "#");
	  fprintf (file, "(");
	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
	  fprintf (file, " + ");
	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
	  fprintf (file, ")");
	  return;
	}

      switch (GET_CODE (op))
	{
	case MULT:
	  /* Should be the scaled part of an
	     indexed register indirect address.  */
	  {
	    rtx base = XEXP (op, 0);
	    rtx index = XEXP (op, 1);

	    /* Check for a swapped index register and scaling factor.
	       Not sure if this can happen, but be prepared to handle it.  */
	    if (CONST_INT_P (base) && REG_P (index))
	      {
		rtx tmp = base;
		base = index;
		index = tmp;
	      }

	    gcc_assert (REG_P (base));
	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
	    gcc_assert (CONST_INT_P (index));
	    /* Do not try to verify the value of the scalar as it is based
	       on the mode of the MEM not the mode of the MULT.  (Which
	       will always be SImode).  */
	    fprintf (file, "%s", reg_names [REGNO (base)]);
	    break;
	  }

	case MEM:
	  output_address (XEXP (op, 0));
	  break;

	case PLUS:
	  output_address (op);
	  break;

	case REG:
	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [REGNO (op)]);
	  break;

	case SUBREG:
	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
	  break;

	  /* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;
	    REAL_VALUE_TYPE rv;

	    REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	    REAL_VALUE_TO_TARGET_SINGLE (rv, val);
	    if (print_hash)
	      fprintf (file, "#");
	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	  if (print_hash)
	    fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op));
	  break;

	case UNSPEC:
	  switch (XINT (op, 1))
	    {
	    case UNSPEC_PID_ADDR:
	      {
		/* Emit "#(sym[+addend]-__pid_base)".  */
		rtx sym, add;

		if (print_hash)
		  fprintf (file, "#");
		sym = XVECEXP (op, 0, 0);
		add = NULL_RTX;
		fprintf (file, "(");
		if (GET_CODE (sym) == PLUS)
		  {
		    add = XEXP (sym, 1);
		    sym = XEXP (sym, 0);
		  }
		output_addr_const (file, sym);
		if (add != NULL_RTX)
		  {
		    fprintf (file, "+");
		    output_addr_const (file, add);
		  }
		fprintf (file, "-__pid_base");
		fprintf (file, ")");
		return;
	      }
	    }
	  /* Fall through */

	case CONST:
	case SYMBOL_REF:
	case LABEL_REF:
	case CODE_LABEL:
	  rx_print_operand_address (file, op);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    }
}
963
/* Maybe convert an operand into its PID format.  */

rtx
rx_maybe_pidify_operand (rtx op, int copy_to_reg)
{
  /* Only operands that still need the PID base register applied
     (PID_UNENCODED) are rewritten; anything else passes through.  */
  if (rx_pid_data_operand (op) == PID_UNENCODED)
    {
      if (GET_CODE (op) == MEM)
	{
	  /* Rewrite the address inside the MEM, keeping the MEM's
	     other attributes.  */
	  rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
	  op = replace_equiv_address (op, a);
	}
      else
	{
	  op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
	}

      /* Optionally force the converted operand into a fresh register.  */
      if (copy_to_reg)
	op = copy_to_mode_reg (GET_MODE (op), op);
    }
  return op;
}
986
/* Returns an assembler template for a move instruction.
   OPERANDS[0] is the destination, OPERANDS[1] the source; IS_MOVU
   selects the zero-extending "movu" form.  The returned pointer
   refers to a static buffer, so the result must be consumed before
   the next call (not reentrant).  */

char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  static char  out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx          dest = operands[0];
  rtx          src  = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  /* For a constant source the destination's mode decides.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case QImode:
      /* The .B extension is not valid when
	 loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
	extension = ".B";
      break;
    case HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
	/* The .W extension is not valid when
	   loading an immediate into a register.  */
	extension = ".W";
      break;
    case DFmode:
    case DImode:
    case SFmode:
    case SImode:
      extension = ".L";
      break;
    case VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      debug_rtx (src);
      gcc_unreachable ();
    }

  /* PID references are loaded as "(sym - __pid_base)[pid-reg]".  */
  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "(%A1 - __pid_base)[%P1]";
    }
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    {
      /* Small data references are loaded relative to the GP base.  */
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "%%gp(%A1)[%G1]";
    }
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    {
      gcc_assert (GET_MODE (dest) != DImode);
      gcc_assert (GET_MODE (dest) != DFmode);

      dst_template = "%%gp(%A0)[%G0]";
    }
  else
    dst_template = "%0";

  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
    {
      /* 64-bit moves are done as two 32-bit moves; when the low half
	 of the destination overlaps the high half of the source, the
	 high halves must be moved first to avoid clobbering.  */
      gcc_assert (! is_movu);

      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
      else
	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
    }
  else
    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
	     extension, src_template, dst_template);
  return out_template;
}
1069
/* Return VALUE rounded up to the next ALIGNMENT boundary.
   ALIGNMENT must be a power of two.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  unsigned int mask = alignment - 1;

  return (value + mask) & ~mask;
}
1078
1079/* Return the number of bytes in the argument registers
1080   occupied by an argument of type TYPE and mode MODE.  */
1081
1082static unsigned int
1083rx_function_arg_size (machine_mode mode, const_tree type)
1084{
1085  unsigned int num_bytes;
1086
1087  num_bytes = (mode == BLKmode)
1088    ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1089  return rx_round_up (num_bytes, UNITS_PER_WORD);
1090}
1091
/* Number of registers available for passing function arguments,
   and the total number of argument bytes they can hold.  */
#define NUM_ARG_REGS		4
#define MAX_NUM_ARG_BYTES	(NUM_ARG_REGS * UNITS_PER_WORD)
1094
/* Return an RTL expression describing the register holding a function
   parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
   be passed on the stack.  CUM describes the previous parameters to the
   function and NAMED is false if the parameter is part of a variable
   parameter list, or the last named parameter before the start of a
   variable parameter list.  */

static rtx
rx_function_arg (cumulative_args_t cum, machine_mode mode,
		 const_tree type, bool named)
{
  unsigned int next_reg;
  unsigned int bytes_so_far = *get_cumulative_args (cum);
  unsigned int size;
  unsigned int rounded_size;

  /* An exploded version of rx_function_arg_size.  */
  size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  /* If the size is not known it cannot be passed in registers.  */
  if (size < 1)
    return NULL_RTX;

  rounded_size = rx_round_up (size, UNITS_PER_WORD);

  /* Don't pass this arg via registers if there
     are insufficient registers to hold all of it.  */
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
    return NULL_RTX;

  /* Unnamed arguments and the last named argument in a
     variadic function are always passed on the stack.  */
  if (!named)
    return NULL_RTX;

  /* Structures must occupy an exact number of registers,
     otherwise they are passed on the stack.  */
  if ((type == NULL || AGGREGATE_TYPE_P (type))
      && (size % UNITS_PER_WORD) != 0)
    return NULL_RTX;

  /* Argument registers start at register number 1, so the argument at
     byte offset 0 goes in register 1, offset 4 in register 2, etc.  */
  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;

  return gen_rtx_REG (mode, next_reg);
}
1139
1140static void
1141rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
1142			 const_tree type, bool named ATTRIBUTE_UNUSED)
1143{
1144  *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
1145}
1146
1147static unsigned int
1148rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
1149			  const_tree type ATTRIBUTE_UNUSED)
1150{
1151  /* Older versions of the RX backend aligned all on-stack arguments
1152     to 32-bits.  The RX C ABI however says that they should be
1153     aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
1154  if (TARGET_GCC_ABI)
1155    return STACK_BOUNDARY;
1156
1157  if (type)
1158    {
1159      if (DECL_P (type))
1160	return DECL_ALIGN (type);
1161      return TYPE_ALIGN (type);
1162    }
1163
1164  return PARM_BOUNDARY;
1165}
1166
1167/* Return an RTL describing where a function return value of type RET_TYPE
1168   is held.  */
1169
1170static rtx
1171rx_function_value (const_tree ret_type,
1172		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1173		   bool       outgoing ATTRIBUTE_UNUSED)
1174{
1175  machine_mode mode = TYPE_MODE (ret_type);
1176
1177  /* RX ABI specifies that small integer types are
1178     promoted to int when returned by a function.  */
1179  if (GET_MODE_SIZE (mode) > 0
1180      && GET_MODE_SIZE (mode) < 4
1181      && ! COMPLEX_MODE_P (mode)
1182      )
1183    return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
1184
1185  return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
1186}
1187
1188/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1189   regard to function returns as does TARGET_FUNCTION_VALUE.  */
1190
1191static machine_mode
1192rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1193			  machine_mode mode,
1194			  int * punsignedp ATTRIBUTE_UNUSED,
1195			  const_tree funtype ATTRIBUTE_UNUSED,
1196			  int for_return)
1197{
1198  if (for_return != 1
1199      || GET_MODE_SIZE (mode) >= 4
1200      || COMPLEX_MODE_P (mode)
1201      || GET_MODE_SIZE (mode) < 1)
1202    return mode;
1203
1204  return SImode;
1205}
1206
1207static bool
1208rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1209{
1210  HOST_WIDE_INT size;
1211
1212  if (TYPE_MODE (type) != BLKmode
1213      && ! AGGREGATE_TYPE_P (type))
1214    return false;
1215
1216  size = int_size_in_bytes (type);
1217  /* Large structs and those whose size is not an
1218     exact multiple of 4 are returned in memory.  */
1219  return size < 1
1220    || size > 16
1221    || (size % UNITS_PER_WORD) != 0;
1222}
1223
/* Implement TARGET_STRUCT_VALUE_RTX: the address of an aggregate being
   returned in memory is passed in STRUCT_VAL_REGNUM, for both incoming
   and outgoing calls.  */

static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}
1230
1231static bool
1232rx_return_in_msb (const_tree valtype)
1233{
1234  return TARGET_BIG_ENDIAN_DATA
1235    && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1236}
1237
1238/* Returns true if the provided function has the specified attribute.  */
1239
1240static inline bool
1241has_func_attr (const_tree decl, const char * func_attr)
1242{
1243  if (decl == NULL_TREE)
1244    decl = current_function_decl;
1245
1246  return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1247}
1248
/* Returns true if the provided function has the "fast_interrupt" attribute.
   A NULL_TREE DECL queries the function currently being compiled.  */

static inline bool
is_fast_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "fast_interrupt");
}
1256
/* Returns true if the provided function has the "interrupt" attribute.
   A NULL_TREE DECL queries the function currently being compiled.  */

static inline bool
is_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "interrupt");
}
1264
/* Returns true if the provided function has the "naked" attribute.
   A NULL_TREE DECL queries the function currently being compiled.  */

static inline bool
is_naked_func (const_tree decl)
{
  return has_func_attr (decl, "naked");
}
1272
/* True whilst compiling a fast interrupt handler.  Set by
   rx_set_current_function and consumed below to release r10-r13
   for allocation.  */
static bool use_fixed_regs = false;

/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  Adjusts the fixed and
   call-used register masks according to TARGET_PID, the small data
   limit and whether a fast interrupt handler is being compiled.  */
static void
rx_conditional_register_usage (void)
{
  /* Tracks whether the fast-interrupt masks are currently installed,
     so that the swap below is performed only on transitions.  */
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      /* Reserve a register to act as the PID base register.  */
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      /* Reserve a register below the PID base (if any) to act as the
	 small data base register.  */
      if (TARGET_PID)
	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
	{
	  unsigned int r;

	  /* Save the masks so that they can be restored when we leave
	     the fast interrupt handler.  */
	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

	  /* This is for fast interrupt handlers.  Any register in
	     the range r10 to r13 (inclusive) that is currently
	     marked as fixed is now a viable, call-used register.  */
	  for (r = 10; r <= 13; r++)
	    if (fixed_regs[r])
	      {
		fixed_regs[r] = 0;
		call_used_regs[r] = 1;
	      }

	  /* Mark r7 as fixed.  This is just a hack to avoid
	     altering the reg_alloc_order array so that the newly
	     freed r10-r13 registers are the preferred registers.  */
	  fixed_regs[7] = call_used_regs[7] = 1;
	}
      else
	{
	  /* Restore the normal register masks.  */
	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
	}

      using_fixed_regs = use_fixed_regs;
    }
}
1333
/* A node in a singly-linked list of function decls for which a
   diagnostic has already been emitted.  */
struct decl_chain
{
  tree fndecl;			/* The function declaration.  */
  struct decl_chain * next;	/* Next node in the list, or NULL.  */
};

/* Stack of decls for which we have issued warnings.  */
static struct decl_chain * warned_decls = NULL;
1342
1343static void
1344add_warned_decl (tree fndecl)
1345{
1346  struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1347
1348  warned->fndecl = fndecl;
1349  warned->next = warned_decls;
1350  warned_decls = warned;
1351}
1352
1353/* Returns TRUE if FNDECL is on our list of warned about decls.  */
1354
1355static bool
1356already_warned (tree fndecl)
1357{
1358  struct decl_chain * warned;
1359
1360  for (warned = warned_decls;
1361       warned != NULL;
1362       warned = warned->next)
1363    if (warned->fndecl == fndecl)
1364      return true;
1365
1366  return false;
1367}
1368
/* Perform any actions necessary before starting to compile FNDECL.
   For the RX we use this to make sure that we have the correct
   set of register masks selected.  If FNDECL is NULL then we are
   compiling top level things.  */

static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  /* Entering or leaving a fast interrupt handler changes the set of
     usable registers (see rx_conditional_register_usage), so the
     backend must be reinitialised.  */
  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}
1421
/* Typical stack layout should look like this after the function's prologue:
1423
1424                            |    |
1425                              --                       ^
1426                            |    | \                   |
1427                            |    |   arguments saved   | Increasing
1428                            |    |   on the stack      |  addresses
1429    PARENT   arg pointer -> |    | /
1430  -------------------------- ---- -------------------
1431    CHILD                   |ret |   return address
1432                              --
1433                            |    | \
1434                            |    |   call saved
1435                            |    |   registers
1436			    |    | /
1437                              --
1438                            |    | \
1439                            |    |   local
1440                            |    |   variables
1441        frame pointer ->    |    | /
1442                              --
1443                            |    | \
1444                            |    |   outgoing          | Decreasing
1445                            |    |   arguments         |  addresses
1446   current stack pointer -> |    | /                   |
1447  -------------------------- ---- ------------------   V
1448                            |    |                 */
1449
/* Returns the number of bits that are set in X (its population count).  */

static unsigned int
bit_count (unsigned int x)
{
  unsigned int count = 0;

  /* Kernighan's method: each iteration clears the lowest set bit.  */
  while (x != 0)
    {
      x &= x - 1;
      count++;
    }

  return count;
}
1464
/* True if the function currently being compiled must preserve the
   accumulator register: i.e. when TARGET_SAVE_ACC_REGISTER is enabled
   and the function is an interrupt or fast interrupt handler.  */
#define MUST_SAVE_ACC_REGISTER			\
  (TARGET_SAVE_ACC_REGISTER			\
   && (is_interrupt_func (NULL_TREE)		\
       || is_fast_interrupt_func (NULL_TREE)))
1469
/* Returns either the lowest numbered and highest numbered registers that
   occupy the call-saved area of the stack frame, if the registers are
   stored as a contiguous block, or else a bitmask of the individual
   registers if they are stored piecemeal.

   Also computes the size of the frame and the size of the outgoing
   arguments block (in bytes).  */

static void
rx_get_stack_layout (unsigned int * lowest,
		     unsigned int * highest,
		     unsigned int * register_mask,
		     unsigned int * frame_size,
		     unsigned int * stack_size)
{
  unsigned int reg;
  unsigned int low;
  unsigned int high;
  unsigned int fixed_reg = 0;
  unsigned int save_mask;
  unsigned int pushed_mask;
  unsigned int unneeded_pushes;

  if (is_naked_func (NULL_TREE))
    {
      /* Naked functions do not create their own stack frame.
	 Instead the programmer must do that for us.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = 0;
      * frame_size = 0;
      * stack_size = 0;
      return;
    }

  /* Scan registers 1 .. CC_REGNUM-1 and record which must be saved,
     both as a LOW/HIGH range and as a bitmask.  */
  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
    {
      if ((df_regs_ever_live_p (reg)
	   /* Always save all call clobbered registers inside non-leaf
	      interrupt handlers, even if they are not live - they may
	      be used in (non-interrupt aware) routines called from this one.  */
	   || (call_used_regs[reg]
	       && is_interrupt_func (NULL_TREE)
	       && ! crtl->is_leaf))
	  && (! call_used_regs[reg]
	      /* Even call clobbered registered must
		 be pushed inside interrupt handlers.  */
	      || is_interrupt_func (NULL_TREE)
	      /* Likewise for fast interrupt handlers, except registers r10 -
		 r13.  These are normally call-saved, but may have been set
		 to call-used by rx_conditional_register_usage.  If so then
		 they can be used in the fast interrupt handler without
		 saving them on the stack.  */
	      || (is_fast_interrupt_func (NULL_TREE)
		  && ! IN_RANGE (reg, 10, 13))))
	{
	  if (low == 0)
	    low = reg;
	  high = reg;

	  save_mask |= 1 << reg;
	}

      /* Remember if we see a fixed register
	 after having found the low register.  */
      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
	fixed_reg = reg;
    }

  /* If we have to save the accumulator register, make sure
     that at least two registers are pushed into the frame.  */
  if (MUST_SAVE_ACC_REGISTER
      && bit_count (save_mask) < 2)
    {
      save_mask |= (1 << 13) | (1 << 14);
      if (low == 0)
	low = 13;
      if (high == 0 || low == high)
	high = low + 1;
    }

  /* Decide if it would be faster fill in the call-saved area of the stack
     frame using multiple PUSH instructions instead of a single PUSHM
     instruction.

     SAVE_MASK is a bitmask of the registers that must be stored in the
     call-save area.  PUSHED_MASK is a bitmask of the registers that would
     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
     is a bitmask of those registers in pushed_mask that are not in
     save_mask.

     We use a simple heuristic that says that it is better to use
     multiple PUSH instructions if the number of unnecessary pushes is
     greater than the number of necessary pushes.

     We also use multiple PUSH instructions if there are any fixed registers
     between LOW and HIGH.  The only way that this can happen is if the user
     has specified --fixed-<reg-name> on the command line and in such
     circumstances we do not want to touch the fixed registers at all.

     FIXME: Is it worth improving this heuristic ?  */
  /* NOTE(review): "-1 << low" left-shifts a negative value, which is
     undefined behaviour in ISO C; this relies on the usual two's
     complement result.  The trailing "& pushed_mask" on the next line
     is redundant (the value is already masked) but harmless.  */
  pushed_mask = (-1 << low) & ~(-1 << (high + 1));
  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;

  if ((fixed_reg && fixed_reg <= high)
      || (optimize_function_for_speed_p (cfun)
	  && bit_count (save_mask) < bit_count (unneeded_pushes)))
    {
      /* Use multiple pushes.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = save_mask;
    }
  else
    {
      /* Use one push multiple instruction.  */
      * lowest = low;
      * highest = high;
      * register_mask = 0;
    }

  /* Round the frame and outgoing argument areas up to the stack
     alignment.  */
  * frame_size = rx_round_up
    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);

  if (crtl->args.size > 0)
    * frame_size += rx_round_up
      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);

  * stack_size = rx_round_up
    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
}
1601
/* Generate a PUSHM instruction that matches the given operands.
   OPERANDS[0] is the total number of bytes pushed (a CONST_INT) and
   OPERANDS[1] is the PARALLEL built by gen_rx_store_vector.  */

void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  /* Number of registers being pushed, minus one.  */
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  /* Element 0 of the PARALLEL is the stack adjustment; element 1 is
     the store of the highest numbered register.  */
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  /* The PUSHM operand syntax is "lowest-highest".  */
  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}
1623
/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.
   Element 0 decrements the stack pointer by the size of the block being
   stored; elements 1 .. N store registers HIGH down to LOW at successively
   lower addresses below the (original) stack pointer.  */

static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One element per register, plus one for the stack adjustment.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}
1649
1650/* Mark INSN as being frame related.  If it is a PARALLEL
1651   then mark each element as being frame related as well.  */
1652
1653static void
1654mark_frame_related (rtx insn)
1655{
1656  RTX_FRAME_RELATED_P (insn) = 1;
1657  insn = PATTERN (insn);
1658
1659  if (GET_CODE (insn) == PARALLEL)
1660    {
1661      unsigned int i;
1662
1663      for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1664	RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1665    }
1666}
1667
1668static bool
1669ok_for_max_constant (HOST_WIDE_INT val)
1670{
1671  if (rx_max_constant_size == 0  || rx_max_constant_size == 4)
1672    /* If there is no constraint on the size of constants
1673       used as operands, then any value is legitimate.  */
1674    return true;
1675
1676  /* rx_max_constant_size specifies the maximum number
1677     of bytes that can be used to hold a signed value.  */
1678  return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1679		        ( 1 << (rx_max_constant_size * 8)));
1680}
1681
/* Generate an ADD of SRC plus VAL into DEST.
   Handles the case where VAL is too big for max_constant_value.
   Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.
   VAL may be NULL_RTX, which is treated the same as a zero addend
   (a plain register copy is emitted).  */

static void
gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
{
  rtx insn;

  if (val == NULL_RTX || INTVAL (val) == 0)
    {
      /* Adding zero: just copy SRC into DEST.  */
      gcc_assert (dest != src);

      insn = emit_move_insn (dest, src);
    }
  else if (ok_for_max_constant (INTVAL (val)))
    insn = emit_insn (gen_addsi3 (dest, src, val));
  else
    {
      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
	 will not reject it.  */
      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
      insn = emit_insn (gen_addsi3 (dest, src, val));

      if (is_frame_related)
	/* We have to provide our own frame related note here
	   as the dwarf2out code cannot be expected to grok
	   our unspec.  */
	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		      gen_rtx_SET (SImode, dest,
				   gen_rtx_PLUS (SImode, src, val)));
      return;
    }

  if (is_frame_related)
    RTX_FRAME_RELATED_P (insn) = 1;
  return;
}
1720
/* Expand the prologue of the current function.  Builds the frame shown
   in the "Typical stack layout" diagram above: pushes the call-saved
   registers, optionally saves the accumulator, sets up the frame
   pointer and allocates the local/outgoing-argument areas.  */

void
rx_expand_prologue (void)
{
  unsigned int stack_size;
  unsigned int frame_size;
  unsigned int mask;
  unsigned int low;
  unsigned int high;
  unsigned int reg;
  rtx insn;

  /* Naked functions use their own, programmer provided prologues.  */
  if (is_naked_func (NULL_TREE))
    return;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (flag_stack_usage_info)
    current_function_static_stack_size = frame_size + stack_size;

  /* If we use any of the callee-saved registers, save them now.  */
  if (mask)
    {
      /* Push registers in reverse order.  */
      for (reg = CC_REGNUM; reg --;)
	if (mask & (1 << reg))
	  {
	    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
	    mark_frame_related (insn);
	  }
    }
  else if (low)
    {
      /* A contiguous range of registers: use a single PUSH or PUSHM.  */
      if (high == low)
	insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
      else
	insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
						    * UNITS_PER_WORD),
					   gen_rx_store_vector (low, high)));
      mark_frame_related (insn);
    }

  if (MUST_SAVE_ACC_REGISTER)
    {
      unsigned int acc_high, acc_low;

      /* Interrupt handlers have to preserve the accumulator
	 register if so requested by the user.  Use the first
         two pushed registers as intermediaries.  */
      if (mask)
	{
	  /* Find the two lowest-numbered registers in MASK.  */
	  acc_low = acc_high = 0;

	  for (reg = 1; reg < CC_REGNUM; reg ++)
	    if (mask & (1 << reg))
	      {
		if (acc_low == 0)
		  acc_low = reg;
		else
		  {
		    acc_high = reg;
		    break;
		  }
	      }

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high != 0);

	  /* Note - the bottom 16 bits of the accumulator are inaccessible.
	     We just assume that they are zero.  */
	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
	}
      else
	{
	  acc_low = low;
	  acc_high = low + 1;

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high <= high);

	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
				      gen_rx_store_vector (acc_low, acc_high)));
	}
    }

  /* If needed, set up the frame pointer.  */
  if (frame_pointer_needed)
    gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
		  GEN_INT (- (HOST_WIDE_INT) frame_size), true);

  /* Allocate space for the outgoing args.
     If the stack frame has not already been set up then handle this as well.  */
  if (stack_size)
    {
      if (frame_size)
	{
	  if (frame_pointer_needed)
	    gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) stack_size), true);
	  else
	    gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
			  true);
	}
      else
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
    }
  else if (frame_size)
    {
      if (! frame_pointer_needed)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
      else
	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
		      true);
    }
}
1844
/* Emit global "$tableentry$<num>$<table>" labels into FILE for each
   vector number or table name given in the attribute ANAME
   ("interrupt" or "vector") of the current function.
   NOTE(review): these labels appear to be consumed externally (e.g. by
   a linker script building the vector table) - confirm against the
   toolchain before changing the label format.  */

static void
add_vector_labels (FILE *file, const char *aname)
{
  tree vec_attr;
  tree val_attr;
  const char *vname = "vect";
  const char *s;
  int vnum;

  /* This node is for the vector/interrupt tag itself */
  vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
  if (!vec_attr)
    return;

  /* Now point it at the first argument */
  vec_attr = TREE_VALUE (vec_attr);

  /* Iterate through the arguments.  */
  while (vec_attr)
    {
      val_attr = TREE_VALUE (vec_attr);
      switch (TREE_CODE (val_attr))
	{
	case STRING_CST:
	  s = TREE_STRING_POINTER (val_attr);
	  goto string_id_common;

	case IDENTIFIER_NODE:
	  s = IDENTIFIER_POINTER (val_attr);

	string_id_common:
	  /* "$default" emits a default table entry; any other string
	     selects the table used by subsequent numeric arguments.  */
	  if (strcmp (s, "$default") == 0)
	    {
	      fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
	      fprintf (file, "$tableentry$default$%s:\n", vname);
	    }
	  else
	    vname = s;
	  break;

	case INTEGER_CST:
	  /* A numeric argument names a slot in the current table.  */
	  vnum = TREE_INT_CST_LOW (val_attr);

	  fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
	  fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
	  break;

	default:
	  /* Other argument kinds are silently ignored.  */
	  ;
	}

      vec_attr = TREE_CHAIN (vec_attr);
    }

}
1900
/* Output, at the start of the current function's assembly, the vector
   table labels and some informational comments describing any special
   properties of the function.  FILE is the assembly output stream.  */

static void
rx_output_function_prologue (FILE * file,
			     HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
{
  add_vector_labels (file, "interrupt");
  add_vector_labels (file, "vector");

  if (is_fast_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");

  if (is_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Interrupt Handler\n");

  if (is_naked_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Naked Function\n");

  if (cfun->static_chain_decl != NULL)
    asm_fprintf (file, "\t; Note: Nested function declared "
		 "inside another function.\n");

  if (crtl->calls_eh_return)
    asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
}
1924
/* Generate a POPM or RTSD instruction that matches the given operands.
   OPERANDS[0] is the stack adjustment (a CONST_INT) and OPERANDS[1] is
   the PARALLEL built by gen_rx_popm_vector (IS_POPM true) or by
   gen_rx_rtsd_vector (IS_POPM false).  */

void
rx_emit_stack_popm (rtx * operands, bool is_popm)
{
  HOST_WIDE_INT stack_adjust;
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  stack_adjust = INTVAL (operands[0]);

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  /* Number of registers popped, minus one.  The PARALLEL contains one
     stack-adjust element (plus a trailing return for RTSD) in addition
     to the register loads.  */
  last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);

  /* Element 1 is the load of the lowest numbered register.  */
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_DEST (first_push);
  gcc_assert (REG_P (first_push));

  if (is_popm)
    asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
  else
    asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
		 (int) stack_adjust,
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
}
1955
/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate.
   Element 0 adds ADJUST bytes to the stack pointer, elements 1 .. N
   reload registers LOW .. HIGH from the stack, and the final element
   is the return itself.  */

static rtx
gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
{
  unsigned int i;
  /* BIAS accounts for the stack adjustment element, the fencepost of
     the LOW..HIGH range and the trailing return element.  */
  unsigned int bias = 3;
  unsigned int count = (high - low) + bias;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx, adjust));

  for (i = 0; i < count - 2; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  XVECEXP (vector, 0, count - 1) = ret_rtx;

  return vector;
}
1985
/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate.
   Element 0 pops the whole block off the stack by advancing the stack
   pointer; elements 1 .. N reload registers LOW .. HIGH from successive
   stack slots.  */

static rtx
gen_rx_popm_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One element per register, plus one for the stack adjustment.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx,
				(count - 1) * UNITS_PER_WORD));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  return vector;
}
2013
2014/* Returns true if a simple return insn can be used.  */
2015
2016bool
2017rx_can_use_simple_return (void)
2018{
2019  unsigned int low;
2020  unsigned int high;
2021  unsigned int frame_size;
2022  unsigned int stack_size;
2023  unsigned int register_mask;
2024
2025  if (is_naked_func (NULL_TREE)
2026      || is_fast_interrupt_func (NULL_TREE)
2027      || is_interrupt_func (NULL_TREE))
2028    return false;
2029
2030  rx_get_stack_layout (& low, & high, & register_mask,
2031		       & frame_size, & stack_size);
2032
2033  return (register_mask == 0
2034	  && (frame_size + stack_size) == 0
2035	  && low == 0);
2036}
2037
/* Expand the epilogue of the current function.  IS_SIBCALL is true when
   the epilogue will be followed by a sibling call, in which case the
   frame is deconstructed but no return instruction is emitted.  */

void
rx_expand_epilogue (bool is_sibcall)
{
  unsigned int low;
  unsigned int high;
  unsigned int frame_size;
  unsigned int stack_size;
  unsigned int register_mask;
  unsigned int regs_size;
  unsigned int reg;
  unsigned HOST_WIDE_INT total_size;

  /* FIXME: We do not support indirect sibcalls at the moment because we
     cannot guarantee that the register holding the function address is a
     call-used register.  If it is a call-saved register then the stack
     pop instructions generated in the epilogue will corrupt the address
     before it is used.

     Creating a new call-used-only register class works but then the
     reload pass gets stuck because it cannot always find a call-used
     register for spilling sibcalls.

     The other possible solution is for this pass to scan forward for the
     sibcall instruction (if it has been generated) and work out if it
     is an indirect sibcall using a call-saved register.  If it is then
     the address can copied into a call-used register in this epilogue
     code and the sibcall instruction modified to use that register.  */

  if (is_naked_func (NULL_TREE))
    {
      gcc_assert (! is_sibcall);

      /* Naked functions use their own, programmer provided epilogues.
	 But, in order to keep gcc happy we have to generate some kind of
	 epilogue RTL.  */
      emit_jump_insn (gen_naked_return ());
      return;
    }

  rx_get_stack_layout (& low, & high, & register_mask,
		       & frame_size, & stack_size);

  total_size = frame_size + stack_size;
  /* Size of the contiguous register save area (only meaningful when
     LOW is non-zero).  */
  regs_size = ((high - low) + 1) * UNITS_PER_WORD;

  /* See if we are unable to use the special stack frame deconstruct and
     return instructions.  In most cases we can use them, but the exceptions
     are:

     - Sibling calling functions deconstruct the frame but do not return to
       their caller.  Instead they branch to their sibling and allow their
       return instruction to return to this function's parent.

     - Fast and normal interrupt handling functions have to use special
       return instructions.

     - Functions where we have pushed a fragmented set of registers into the
       call-save area must have the same set of registers popped.  */
  if (is_sibcall
      || is_fast_interrupt_func (NULL_TREE)
      || is_interrupt_func (NULL_TREE)
      || register_mask)
    {
      /* Cannot use the special instructions - deconstruct by hand.  */
      if (total_size)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (total_size), false);

      if (MUST_SAVE_ACC_REGISTER)
	{
	  unsigned int acc_low, acc_high;

	  /* Reverse the saving of the accumulator register onto the stack.
	     Note we must adjust the saved "low" accumulator value as it
	     is really the middle 32-bits of the accumulator.  */
	  if (register_mask)
	    {
	      /* Find the two lowest-numbered registers in REGISTER_MASK,
		 mirroring the choice made in rx_expand_prologue.  */
	      acc_low = acc_high = 0;

	      for (reg = 1; reg < CC_REGNUM; reg ++)
		if (register_mask & (1 << reg))
		  {
		    if (acc_low == 0)
		      acc_low = reg;
		    else
		      {
			acc_high = reg;
			break;
		      }
		  }
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
	    }
	  else
	    {
	      acc_low = low;
	      acc_high = low + 1;
	      emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
					 gen_rx_popm_vector (acc_low, acc_high)));
	    }

	  emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
				  gen_rtx_REG (SImode, acc_low),
				  GEN_INT (16)));
	  emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
	}

      if (register_mask)
	{
	  /* Pop the piecemeal-saved registers in ascending order,
	     reversing the descending pushes of the prologue.  */
	  for (reg = 0; reg < CC_REGNUM; reg ++)
	    if (register_mask & (1 << reg))
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
	}
      else if (low)
	{
	  if (high == low)
	    emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
	  else
	    emit_insn (gen_stack_popm (GEN_INT (regs_size),
				       gen_rx_popm_vector (low, high)));
	}

      if (is_fast_interrupt_func (NULL_TREE))
	{
	  gcc_assert (! is_sibcall);
	  emit_jump_insn (gen_fast_interrupt_return ());
	}
      else if (is_interrupt_func (NULL_TREE))
	{
	  gcc_assert (! is_sibcall);
	  emit_jump_insn (gen_exception_return ());
	}
      else if (! is_sibcall)
	emit_jump_insn (gen_simple_return ());

      return;
    }

  /* If we allocated space on the stack, free it now.  */
  if (total_size)
    {
      unsigned HOST_WIDE_INT rtsd_size;

      /* See if we can use the RTSD instruction.  */
      rtsd_size = total_size + regs_size;
      if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
	{
	  if (low)
	    emit_jump_insn (gen_pop_and_return
			    (GEN_INT (rtsd_size),
			     gen_rx_rtsd_vector (rtsd_size, low, high)));
	  else
	    emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));

	  return;
	}

      gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		    GEN_INT (total_size), false);
    }

  if (low)
    emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
					gen_rx_rtsd_vector (regs_size,
							    low, high)));
  else
    emit_jump_insn (gen_simple_return ());
}
2207
2208
2209/* Compute the offset (in words) between FROM (arg pointer
2210   or frame pointer) and TO (frame pointer or stack pointer).
2211   See ASCII art comment at the start of rx_expand_prologue
2212   for more information.  */
2213
int
rx_initial_elimination_offset (int from, int to)
{
  unsigned int low;
  unsigned int high;
  unsigned int frame_size;
  unsigned int stack_size;
  unsigned int mask;

  /* Query the prologue's stack layout.  LOW..HIGH is the contiguous
     range of registers saved en masse, MASK the set saved one by one
     (only one of the two is in use at a time, judging by the disjoint
     LOW/MASK tests below).  FRAME_SIZE and STACK_SIZE are in bytes.  */
  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (from == ARG_POINTER_REGNUM)
    {
      /* Extend the computed size of the stack frame to
	 include the registers pushed in the prologue.  */
      if (low)
	frame_size += ((high - low) + 1) * UNITS_PER_WORD;
      else
	frame_size += bit_count (mask) * UNITS_PER_WORD;

      /* Remember to include the return address.  */
      frame_size += 1 * UNITS_PER_WORD;

      if (to == FRAME_POINTER_REGNUM)
	return frame_size;

      /* The only other legal target is the stack pointer, which lies a
	 further STACK_SIZE bytes (the outgoing area) below the frame.  */
      gcc_assert (to == STACK_POINTER_REGNUM);
      return frame_size + stack_size;
    }

  gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
  return stack_size;
}
2247
2248/* Decide if a variable should go into one of the small data sections.  */
2249
2250static bool
2251rx_in_small_data (const_tree decl)
2252{
2253  int size;
2254  const char * section;
2255
2256  if (rx_small_data_limit == 0)
2257    return false;
2258
2259  if (TREE_CODE (decl) != VAR_DECL)
2260    return false;
2261
2262  /* We do not put read-only variables into a small data area because
2263     they would be placed with the other read-only sections, far away
2264     from the read-write data sections, and we only have one small
2265     data area pointer.
2266     Similarly commons are placed in the .bss section which might be
2267     far away (and out of alignment with respect to) the .data section.  */
2268  if (TREE_READONLY (decl) || DECL_COMMON (decl))
2269    return false;
2270
2271  section = DECL_SECTION_NAME (decl);
2272  if (section)
2273    return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
2274
2275  size = int_size_in_bytes (TREE_TYPE (decl));
2276
2277  return (size > 0) && (size <= rx_small_data_limit);
2278}
2279
2280/* Return a section for X.
2281   The only special thing we do here is to honor small data.  */
2282
2283static section *
2284rx_select_rtx_section (machine_mode mode,
2285		       rtx x,
2286		       unsigned HOST_WIDE_INT align)
2287{
2288  if (rx_small_data_limit > 0
2289      && GET_MODE_SIZE (mode) <= rx_small_data_limit
2290      && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2291    return sdata_section;
2292
2293  return default_elf_select_rtx_section (mode, x, align);
2294}
2295
/* Implement TARGET_ASM_SELECT_SECTION.  Choose a section for DECL,
   honouring the small data area when it is enabled and working around
   the Renesas AS100 assembler's lack of mergeable-section support.  */

static section *
rx_select_section (tree decl,
		   int reloc,
		   unsigned HOST_WIDE_INT align)
{
  if (rx_small_data_limit > 0)
    {
      switch (categorize_decl_for_section (decl, reloc))
	{
	case SECCAT_SDATA:	return sdata_section;
	case SECCAT_SBSS:	return sbss_section;
	case SECCAT_SRODATA:
	  /* Fall through.  We do not put small, read only
	     data into the C_2 section because we are not
	     using the C_2 section.  We do not use the C_2
	     section because it is located with the other
	     read-only data sections, far away from the read-write
	     data sections and we only have one small data
	     pointer (r13).  */
	default:
	  break;
	}
    }

  /* If we are supporting the Renesas assembler
     we cannot use mergeable sections.  */
  if (TARGET_AS100_SYNTAX)
    switch (categorize_decl_for_section (decl, reloc))
      {
      case SECCAT_RODATA_MERGE_CONST:
      case SECCAT_RODATA_MERGE_STR_INIT:
      case SECCAT_RODATA_MERGE_STR:
	return readonly_data_section;

      default:
	break;
      }

  return default_elf_select_section (decl, reloc, align);
}
2336
/* Function codes for the RX-specific builtins, one per __builtin_rx_*
   function registered in rx_init_builtins below.  RX_BUILTIN_max must
   remain the final enumerator as it sizes the decl table.  */

enum rx_builtin
{
  RX_BUILTIN_BRK,
  RX_BUILTIN_CLRPSW,
  RX_BUILTIN_INT,
  RX_BUILTIN_MACHI,
  RX_BUILTIN_MACLO,
  RX_BUILTIN_MULHI,
  RX_BUILTIN_MULLO,
  RX_BUILTIN_MVFACHI,
  RX_BUILTIN_MVFACMI,
  RX_BUILTIN_MVFC,
  RX_BUILTIN_MVTACHI,
  RX_BUILTIN_MVTACLO,
  RX_BUILTIN_MVTC,
  RX_BUILTIN_MVTIPL,
  RX_BUILTIN_RACW,
  RX_BUILTIN_REVW,
  RX_BUILTIN_RMPA,
  RX_BUILTIN_ROUND,
  RX_BUILTIN_SETPSW,
  RX_BUILTIN_WAIT,
  RX_BUILTIN_max
};

/* Decl nodes for the builtins, indexed by function code.  GTY-marked
   so the garbage collector keeps the trees alive.  */
static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2363
/* Implement TARGET_INIT_BUILTINS: register every __builtin_rx_* function
   and record its decl in rx_builtins for rx_builtin_decl.  */

static void
rx_init_builtins (void)
{
  /* The ADD_RX_BUILTINn helpers register builtin "__builtin_rx_" LC_NAME
     taking n arguments.  Type arguments are spelled without their
     "_type_node" suffix; the function code is RX_BUILTIN_ UC_NAME.  */
#define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE)		\
   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
   add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE)		\
   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
   add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE##_type_node, \
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE1##_type_node,\
						  ARG_TYPE2##_type_node,\
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE1##_type_node,\
						  ARG_TYPE2##_type_node,\
						  ARG_TYPE3##_type_node,\
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

  ADD_RX_BUILTIN0 (BRK,     "brk",     void);
  ADD_RX_BUILTIN1 (CLRPSW,  "clrpsw",  void,  integer);
  ADD_RX_BUILTIN1 (SETPSW,  "setpsw",  void,  integer);
  ADD_RX_BUILTIN1 (INT,     "int",     void,  integer);
  ADD_RX_BUILTIN2 (MACHI,   "machi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MACLO,   "maclo",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULHI,   "mulhi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULLO,   "mullo",   void,  intSI, intSI);
  ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
  ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
  ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void,  intSI);
  ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void,  intSI);
  ADD_RX_BUILTIN0 (RMPA,    "rmpa",    void);
  ADD_RX_BUILTIN1 (MVFC,    "mvfc",    intSI, integer);
  ADD_RX_BUILTIN2 (MVTC,    "mvtc",    void,  integer, integer);
  ADD_RX_BUILTIN1 (MVTIPL,  "mvtipl",  void,  integer);
  ADD_RX_BUILTIN1 (RACW,    "racw",    void,  integer);
  ADD_RX_BUILTIN1 (ROUND,   "round",   intSI, float);
  ADD_RX_BUILTIN1 (REVW,    "revw",    intSI, intSI);
  ADD_RX_BUILTIN0 (WAIT,    "wait",    void);
}
2426
2427/* Return the RX builtin for CODE.  */
2428
2429static tree
2430rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2431{
2432  if (code >= RX_BUILTIN_max)
2433    return error_mark_node;
2434
2435  return rx_builtins[code];
2436}
2437
2438static rtx
2439rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2440{
2441  if (reg && ! REG_P (arg))
2442    arg = force_reg (SImode, arg);
2443
2444  emit_insn (gen_func (arg));
2445
2446  return NULL_RTX;
2447}
2448
2449static rtx
2450rx_expand_builtin_mvtc (tree exp)
2451{
2452  rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2453  rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2454
2455  if (! CONST_INT_P (arg1))
2456    return NULL_RTX;
2457
2458  if (! REG_P (arg2))
2459    arg2 = force_reg (SImode, arg2);
2460
2461  emit_insn (gen_mvtc (arg1, arg2));
2462
2463  return NULL_RTX;
2464}
2465
2466static rtx
2467rx_expand_builtin_mvfc (tree t_arg, rtx target)
2468{
2469  rtx arg = expand_normal (t_arg);
2470
2471  if (! CONST_INT_P (arg))
2472    return NULL_RTX;
2473
2474  if (target == NULL_RTX)
2475    return NULL_RTX;
2476
2477  if (! REG_P (target))
2478    target = force_reg (SImode, target);
2479
2480  emit_insn (gen_mvfc (target, arg));
2481
2482  return target;
2483}
2484
2485static rtx
2486rx_expand_builtin_mvtipl (rtx arg)
2487{
2488  /* The RX610 does not support the MVTIPL instruction.  */
2489  if (rx_cpu_type == RX610)
2490    return NULL_RTX;
2491
2492  if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2493    return NULL_RTX;
2494
2495  emit_insn (gen_mvtipl (arg));
2496
2497  return NULL_RTX;
2498}
2499
2500static rtx
2501rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2502{
2503  rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2504  rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2505
2506  if (! REG_P (arg1))
2507    arg1 = force_reg (SImode, arg1);
2508
2509  if (! REG_P (arg2))
2510    arg2 = force_reg (SImode, arg2);
2511
2512  emit_insn (gen_func (arg1, arg2));
2513
2514  return NULL_RTX;
2515}
2516
2517static rtx
2518rx_expand_int_builtin_1_arg (rtx arg,
2519			     rtx target,
2520			     rtx (* gen_func)(rtx, rtx),
2521			     bool mem_ok)
2522{
2523  if (! REG_P (arg))
2524    if (!mem_ok || ! MEM_P (arg))
2525      arg = force_reg (SImode, arg);
2526
2527  if (target == NULL_RTX || ! REG_P (target))
2528    target = gen_reg_rtx (SImode);
2529
2530  emit_insn (gen_func (target, arg));
2531
2532  return target;
2533}
2534
2535static rtx
2536rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2537{
2538  if (target == NULL_RTX || ! REG_P (target))
2539    target = gen_reg_rtx (SImode);
2540
2541  emit_insn (gen_func (target));
2542
2543  return target;
2544}
2545
2546static rtx
2547rx_expand_builtin_round (rtx arg, rtx target)
2548{
2549  if ((! REG_P (arg) && ! MEM_P (arg))
2550      || GET_MODE (arg) != SFmode)
2551    arg = force_reg (SFmode, arg);
2552
2553  if (target == NULL_RTX || ! REG_P (target))
2554    target = gen_reg_rtx (SImode);
2555
2556  emit_insn (gen_lrintsf2 (target, arg));
2557
2558  return target;
2559}
2560
2561static int
2562valid_psw_flag (rtx op, const char *which)
2563{
2564  static int mvtc_inform_done = 0;
2565
2566  if (GET_CODE (op) == CONST_INT)
2567    switch (INTVAL (op))
2568      {
2569      case 0: case 'c': case 'C':
2570      case 1: case 'z': case 'Z':
2571      case 2: case 's': case 'S':
2572      case 3: case 'o': case 'O':
2573      case 8: case 'i': case 'I':
2574      case 9: case 'u': case 'U':
2575	return 1;
2576      }
2577
2578  error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2579  if (!mvtc_inform_done)
2580    error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2581  mvtc_inform_done = 1;
2582
2583  return 0;
2584}
2585
/* Implement TARGET_EXPAND_BUILTIN.  Expand the call EXP to an RX
   builtin into RTL.  TARGET is a suggested destination for builtins
   producing a value; OP is the first argument, pre-expanded, or
   NULL_RTX for zero-argument builtins.  */

static rtx
rx_expand_builtin (tree exp,
		   rtx target,
		   rtx subtarget ATTRIBUTE_UNUSED,
		   machine_mode mode ATTRIBUTE_UNUSED,
		   int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  tree arg    = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
  rtx  op     = arg ? expand_normal (arg) : NULL_RTX;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case RX_BUILTIN_BRK:     emit_insn (gen_brk ()); return NULL_RTX;
    case RX_BUILTIN_CLRPSW:
      if (!valid_psw_flag (op, "clrpsw"))
	return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
    case RX_BUILTIN_SETPSW:
      if (!valid_psw_flag (op, "setpsw"))
	return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
    case RX_BUILTIN_INT:     return rx_expand_void_builtin_1_arg
	(op, gen_int, false);
    case RX_BUILTIN_MACHI:   return rx_expand_builtin_mac (exp, gen_machi);
    case RX_BUILTIN_MACLO:   return rx_expand_builtin_mac (exp, gen_maclo);
    case RX_BUILTIN_MULHI:   return rx_expand_builtin_mac (exp, gen_mulhi);
    case RX_BUILTIN_MULLO:   return rx_expand_builtin_mac (exp, gen_mullo);
    case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
	(target, gen_mvfachi);
    case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
	(target, gen_mvfacmi);
    case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
	(op, gen_mvtachi, true);
    case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
	(op, gen_mvtaclo, true);
    case RX_BUILTIN_RMPA:    emit_insn (gen_rmpa ()); return NULL_RTX;
    /* MVFC needs the unexpanded tree argument, hence ARG not OP.  */
    case RX_BUILTIN_MVFC:    return rx_expand_builtin_mvfc (arg, target);
    case RX_BUILTIN_MVTC:    return rx_expand_builtin_mvtc (exp);
    case RX_BUILTIN_MVTIPL:  return rx_expand_builtin_mvtipl (op);
    case RX_BUILTIN_RACW:    return rx_expand_void_builtin_1_arg
	(op, gen_racw, false);
    case RX_BUILTIN_ROUND:   return rx_expand_builtin_round (op, target);
    case RX_BUILTIN_REVW:    return rx_expand_int_builtin_1_arg
	(op, target, gen_revw, false);
    case RX_BUILTIN_WAIT:    emit_insn (gen_wait ()); return NULL_RTX;

    default:
      internal_error ("bad builtin code");
      break;
    }

  return NULL_RTX;
}
2641
2642/* Place an element into a constructor or destructor section.
2643   Like default_ctor_section_asm_out_constructor in varasm.c
2644   except that it uses .init_array (or .fini_array) and it
2645   handles constructor priorities.  */
2646
2647static void
2648rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2649{
2650  section * s;
2651
2652  if (priority != DEFAULT_INIT_PRIORITY)
2653    {
2654      char buf[18];
2655
2656      sprintf (buf, "%s.%.5u",
2657	       is_ctor ? ".init_array" : ".fini_array",
2658	       priority);
2659      s = get_section (buf, SECTION_WRITE, NULL_TREE);
2660    }
2661  else if (is_ctor)
2662    s = ctors_section;
2663  else
2664    s = dtors_section;
2665
2666  switch_to_section (s);
2667  assemble_align (POINTER_SIZE);
2668  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2669}
2670
2671static void
2672rx_elf_asm_constructor (rtx symbol, int priority)
2673{
2674  rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2675}
2676
2677static void
2678rx_elf_asm_destructor (rtx symbol, int priority)
2679{
2680  rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2681}
2682
2683/* Check "fast_interrupt", "interrupt" and "naked" attributes.  */
2684
/* Attribute handler shared by "fast_interrupt", "interrupt" and
   "naked": these attributes are only valid on function declarations.
   Sets *NO_ADD_ATTRS when the attribute is misapplied, so it is
   silently dropped after the warning.  */

static tree
rx_handle_func_attribute (tree * node,
			  tree   name,
			  tree   args ATTRIBUTE_UNUSED,
			  int    flags ATTRIBUTE_UNUSED,
			  bool * no_add_attrs)
{
  gcc_assert (DECL_P (* node));

  if (TREE_CODE (* node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
	       name);
      * no_add_attrs = true;
    }

  /* FIXME: We ought to check for conflicting attributes.  */

  /* FIXME: We ought to check that the interrupt and exception
     handler attributes have been applied to void functions.  */
  return NULL_TREE;
}
2707
2708/* Check "vector" attribute.  */
2709
2710static tree
2711rx_handle_vector_attribute (tree * node,
2712			    tree   name,
2713			    tree   args,
2714			    int    flags ATTRIBUTE_UNUSED,
2715			    bool * no_add_attrs)
2716{
2717  gcc_assert (DECL_P (* node));
2718  gcc_assert (args != NULL_TREE);
2719
2720  if (TREE_CODE (* node) != FUNCTION_DECL)
2721    {
2722      warning (OPT_Wattributes, "%qE attribute only applies to functions",
2723	       name);
2724      * no_add_attrs = true;
2725    }
2726
2727  return NULL_TREE;
2728}
2729
2730/* Table of RX specific attributes.  */
/* Table of RX specific attributes.  "fast_interrupt", "interrupt" and
   "naked" share one handler; "vector" (which requires at least one
   argument) has its own.  All four may only appear on declarations
   (decl_req) and none affects type identity.  */
const struct attribute_spec rx_attribute_table[] =
{
  /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity.  */
  { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
    false },
  { "interrupt",      0, -1, true, false, false, rx_handle_func_attribute,
    false },
  { "naked",          0, 0, true, false, false, rx_handle_func_attribute,
    false },
  { "vector",         1, -1, true, false, false, rx_handle_vector_attribute,
    false },
  /* Sentinel terminating the table.  */
  { NULL,             0, 0, false, false, false, NULL, false }
};
2745
2746/* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE.  */
2747
2748static void
2749rx_override_options_after_change (void)
2750{
2751  static bool first_time = TRUE;
2752
2753  if (first_time)
2754    {
2755      /* If this is the first time through and the user has not disabled
2756	 the use of RX FPU hardware then enable -ffinite-math-only,
2757	 since the FPU instructions do not support NaNs and infinities.  */
2758      if (TARGET_USE_FPU)
2759	flag_finite_math_only = 1;
2760
2761      first_time = FALSE;
2762    }
2763  else
2764    {
2765      /* Alert the user if they are changing the optimization options
2766	 to use IEEE compliant floating point arithmetic with RX FPU insns.  */
2767      if (TARGET_USE_FPU
2768	  && !flag_finite_math_only)
2769	warning (0, "RX FPU instructions do not support NaNs and infinities");
2770    }
2771}
2772
/* Implement TARGET_OPTION_OVERRIDE.  Process the deferred
   -mint-register=N option, apply option defaults that depend on other
   options, and pick alignment defaults for the selected CPU.  */

static void
rx_option_override (void)
{
  unsigned int i;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;

  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mint_register_:
	    /* -mint-register=N reserves the top N of r10..r13 by making
	       them fixed and call-used; the cases deliberately fall
	       through so N=4 fixes all four, N=1 only r13.  */
	    switch (opt->value)
	      {
	      case 4:
		fixed_regs[10] = call_used_regs [10] = 1;
		/* Fall through.  */
	      case 3:
		fixed_regs[11] = call_used_regs [11] = 1;
		/* Fall through.  */
	      case 2:
		fixed_regs[12] = call_used_regs [12] = 1;
		/* Fall through.  */
	      case 1:
		fixed_regs[13] = call_used_regs [13] = 1;
		/* Fall through.  */
	      case 0:
		rx_num_interrupt_regs = opt->value;
		break;
	      default:
		rx_num_interrupt_regs = 0;
		/* Error message already given because rx_handle_option
		  returned false.  */
		break;
	      }
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* This target defaults to strict volatile bitfields.  */
  if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
    flag_strict_volatile_bitfields = 1;

  rx_override_options_after_change ();

  /* These values are bytes, not log.  */
  if (align_jumps == 0 && ! optimize_size)
    align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
  if (align_loops == 0 && ! optimize_size)
    align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
  if (align_labels == 0 && ! optimize_size)
    align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
}
2830
2831
2832static bool
2833rx_allocate_stack_slots_for_args (void)
2834{
2835  /* Naked functions should not allocate stack slots for arguments.  */
2836  return ! is_naked_func (NULL_TREE);
2837}
2838
2839static bool
2840rx_func_attr_inlinable (const_tree decl)
2841{
2842  return ! is_fast_interrupt_func (decl)
2843    &&   ! is_interrupt_func (decl)
2844    &&   ! is_naked_func (decl);
2845}
2846
2847static bool
2848rx_warn_func_return (tree decl)
2849{
2850  /* Naked functions are implemented entirely in assembly, including the
2851     return sequence, so suppress warnings about this.  */
2852  return !is_naked_func (decl);
2853}
2854
2855/* Return nonzero if it is ok to make a tail-call to DECL,
2856   a function_decl or NULL if this is an indirect call, using EXP  */
2857
2858static bool
2859rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2860{
2861  /* Do not allow indirect tailcalls.  The
2862     sibcall patterns do not support them.  */
2863  if (decl == NULL)
2864    return false;
2865
2866  /* Never tailcall from inside interrupt handlers or naked functions.  */
2867  if (is_fast_interrupt_func (NULL_TREE)
2868      || is_interrupt_func (NULL_TREE)
2869      || is_naked_func (NULL_TREE))
2870    return false;
2871
2872  return true;
2873}
2874
2875static void
2876rx_file_start (void)
2877{
2878  if (! TARGET_AS100_SYNTAX)
2879    default_file_start ();
2880}
2881
2882static bool
2883rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2884{
2885  /* The packed attribute overrides the MS behaviour.  */
2886  return ! TYPE_PACKED (record_type);
2887}
2888
2889/* Returns true if X a legitimate constant for an immediate
2890   operand on the RX.  X is already known to satisfy CONSTANT_P.  */
2891
bool
rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
      x = XEXP (x, 0);

      if (GET_CODE (x) == PLUS)
	{
	  if (! CONST_INT_P (XEXP (x, 1)))
	    return false;

	  /* GCC would not pass us CONST_INT + CONST_INT so we
	     know that we have {SYMBOL|LABEL} + CONST_INT.  */
	  x = XEXP (x, 0);
	  gcc_assert (! CONST_INT_P (x));
	}

      switch (GET_CODE (x))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  return true;

	/* Only our own PID/const unspecs are legitimate.  */
	case UNSPEC:
	  return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;

	default:
	  /* FIXME: Can this ever happen ?  */
	  gcc_unreachable ();
	}
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      return true;
    /* Doubles are only usable as immediates when constants may be
       (at least) 4 bytes wide.  */
    case CONST_DOUBLE:
      return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
    case CONST_VECTOR:
      return false;
    default:
      gcc_assert (CONST_INT_P (x));
      break;
    }

  /* Plain integers are checked against the -mmax-constant-size limit.  */
  return ok_for_max_constant (INTVAL (x));
}
2940
/* Implement TARGET_ADDRESS_COST.  Estimate the cost of using ADDR as a
   memory address, steering register allocation away from expensive
   addressing forms.  */

static int
rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
		 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
{
  rtx a, b;

  /* Anything that is not REG+something is a plain base address.  */
  if (GET_CODE (addr) != PLUS)
    return COSTS_N_INSNS (1);

  a = XEXP (addr, 0);
  b = XEXP (addr, 1);

  if (REG_P (a) && REG_P (b))
    /* Try to discourage REG+REG addressing as it keeps two registers live.  */
    return COSTS_N_INSNS (4);

  if (speed)
    /* [REG+OFF] is just as fast as [REG].  */
    return COSTS_N_INSNS (1);

  /* NOTE(review): the bounds below are asymmetric (> 128, < -127)
     rather than the usual signed-byte range 127/-128 — confirm this
     matches the RX short-displacement encoding rather than being an
     off-by-one.  */
  if (CONST_INT_P (b)
      && ((INTVAL (b) > 128) || INTVAL (b) < -127))
    /* Try to discourage REG + <large OFF> when optimizing for size.  */
    return COSTS_N_INSNS (2);

  return COSTS_N_INSNS (1);
}
2968
2969static bool
2970rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2971{
2972  /* We can always eliminate to the frame pointer.
2973     We can eliminate to the stack pointer unless a frame
2974     pointer is needed.  */
2975
2976  return to == FRAME_POINTER_REGNUM
2977    || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2978}
2979
2980
static void
rx_trampoline_template (FILE * file)
{
  /* Output assembler code for a block containing the constant
     part of a trampoline, leaving space for the variable parts.

     On the RX, (where r8 is the static chain regnum) the trampoline
     looks like:

	   mov 		#<static chain value>, r8
	   mov          #<function's address>, r9
	   jmp		r9

     In big-endian-data-mode however instructions are read into the CPU
     4 bytes at a time.  These bytes are then swapped around before being
     passed to the decoder.  So...we must partition our trampoline into
     4 byte packets and swap these packets around so that the instruction
     reader will reverse the process.  But, in order to avoid splitting
     the 32-bit constants across these packet boundaries, (making inserting
     them into the constructed trampoline very difficult) we have to pad the
     instruction sequence with NOP insns.  ie:

           nop
	   nop
           mov.l	#<...>, r8
	   nop
	   nop
           mov.l	#<...>, r9
           jmp		r9
	   nop
	   nop             */

  if (! TARGET_BIG_ENDIAN_DATA)
    {
      /* 0xdeadbeef marks the immediate slots that rx_trampoline_init
	 later overwrites with the real chain and function address.  */
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
      asm_fprintf (file, "\tjmp\tr%d\n",                TRAMPOLINE_TEMP_REGNUM);
    }
  else
    {
      /* Emit the padded, packet-swapped sequence described above as raw
	 bytes, patching the register numbers into the opcodes.  This
	 assumes both register numbers are single digits.  */
      char r8 = '0' + STATIC_CHAIN_REGNUM;
      char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;

      if (TARGET_AS100_SYNTAX)
        {
          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r8);
          asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r9);
          asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
          asm_fprintf (file, "\t.BYTE 003H,  003H, 00%cH, 07fH\n", r9);
        }
      else
        {
          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r8);
          asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r9);
          asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
          asm_fprintf (file, "\t.byte 0x03,  0x03, 0x0%c, 0x7f\n", r9);
        }
    }
}
3042
/* Implement TARGET_TRAMPOLINE_INIT.  Copy the trampoline template into
   TRAMP and patch in CHAIN and FNDECL's address over the 0xdeadbeef
   placeholders emitted by rx_trampoline_template.  */

static void
rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_block_move (tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* The offsets differ between the two layouts because the big-endian
     template is NOP-padded to keep each constant inside one 4-byte
     packet — they must match the byte positions of the placeholders
     in rx_trampoline_template.  */
  if (TARGET_BIG_ENDIAN_DATA)
    {
      emit_move_insn (adjust_address (tramp, SImode, 4), chain);
      emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
    }
  else
    {
      emit_move_insn (adjust_address (tramp, SImode, 2), chain);
      emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
    }
}
3062
3063static int
3064rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3065		     reg_class_t regclass ATTRIBUTE_UNUSED,
3066		     bool in)
3067{
3068  return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
3069}
3070
3071/* Convert a CC_MODE to the set of flags that it represents.  */
3072
3073static unsigned int
3074flags_from_mode (machine_mode mode)
3075{
3076  switch (mode)
3077    {
3078    case CC_ZSmode:
3079      return CC_FLAG_S | CC_FLAG_Z;
3080    case CC_ZSOmode:
3081      return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3082    case CC_ZSCmode:
3083      return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3084    case CCmode:
3085      return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3086    case CC_Fmode:
3087      return CC_FLAG_FP;
3088    default:
3089      gcc_unreachable ();
3090    }
3091}
3092
3093/* Convert a set of flags to a CC_MODE that can implement it.  */
3094
3095static machine_mode
3096mode_from_flags (unsigned int f)
3097{
3098  if (f & CC_FLAG_FP)
3099    return CC_Fmode;
3100  if (f & CC_FLAG_O)
3101    {
3102      if (f & CC_FLAG_C)
3103	return CCmode;
3104      else
3105	return CC_ZSOmode;
3106    }
3107  else if (f & CC_FLAG_C)
3108    return CC_ZSCmode;
3109  else
3110    return CC_ZSmode;
3111}
3112
3113/* Convert an RTX_CODE to the set of flags needed to implement it.
3114   This assumes an integer comparison.  */
3115
3116static unsigned int
3117flags_from_code (enum rtx_code code)
3118{
3119  switch (code)
3120    {
3121    case LT:
3122    case GE:
3123      return CC_FLAG_S;
3124    case GT:
3125    case LE:
3126      return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3127    case GEU:
3128    case LTU:
3129      return CC_FLAG_C;
3130    case GTU:
3131    case LEU:
3132      return CC_FLAG_C | CC_FLAG_Z;
3133    case EQ:
3134    case NE:
3135      return CC_FLAG_Z;
3136    default:
3137      gcc_unreachable ();
3138    }
3139}
3140
3141/* Return a CC_MODE of which both M1 and M2 are subsets.  */
3142
3143static machine_mode
3144rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
3145{
3146  unsigned f;
3147
3148  /* Early out for identical modes.  */
3149  if (m1 == m2)
3150    return m1;
3151
3152  /* There's no valid combination for FP vs non-FP.  */
3153  f = flags_from_mode (m1) | flags_from_mode (m2);
3154  if (f & CC_FLAG_FP)
3155    return VOIDmode;
3156
3157  /* Otherwise, see what mode can implement all the flags.  */
3158  return mode_from_flags (f);
3159}
3160
3161/* Return the minimal CC mode needed to implement (CMP_CODE X Y).  */
3162
3163machine_mode
3164rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
3165{
3166  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3167    return CC_Fmode;
3168
3169  if (y != const0_rtx)
3170    return CCmode;
3171
3172  return mode_from_flags (flags_from_code (cmp_code));
3173}
3174
3175/* Split the conditional branch.  Emit (COMPARE C1 C2) into CC_REG with
3176   CC_MODE, and use that in branches based on that compare.  */
3177
void
rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
		  rtx c1, rtx c2, rtx label)
{
  rtx flags, x;

  /* cc-reg = compare (C1, C2) in CC_MODE.  */
  flags = gen_rtx_REG (cc_mode, CC_REG);
  x = gen_rtx_COMPARE (cc_mode, c1, c2);
  x = gen_rtx_SET (VOIDmode, flags, x);
  emit_insn (x);

  /* if (cc-reg CMP1 0) goto LABEL.  */
  x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
  x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
  x = gen_rtx_SET (VOIDmode, pc_rtx, x);
  emit_jump_insn (x);
}
3194
3195/* A helper function for matching parallels that set the flags.  */
3196
bool
rx_match_ccmode (rtx insn, machine_mode cc_mode)
{
  rtx op1, flags;
  machine_mode flags_mode;

  /* INSN is expected to be a two-element PARALLEL whose second element
     sets the flags register from a COMPARE.  */
  gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);

  op1 = XVECEXP (PATTERN (insn), 0, 1);
  gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);

  flags = SET_DEST (op1);
  flags_mode = GET_MODE (flags);

  /* The COMPARE and its destination must agree on a CC-class mode.  */
  if (GET_MODE (SET_SRC (op1)) != flags_mode)
    return false;
  if (GET_MODE_CLASS (flags_mode) != MODE_CC)
    return false;

  /* Ensure that the mode of FLAGS is compatible with CC_MODE.  */
  if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
    return false;

  return true;
}
3222
3223int
3224rx_align_for_label (rtx lab, int uses_threshold)
3225{
3226  /* This is a simple heuristic to guess when an alignment would not be useful
3227     because the delay due to the inserted NOPs would be greater than the delay
3228     due to the misaligned branch.  If uses_threshold is zero then the alignment
3229     is always useful.  */
3230  if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3231    return 0;
3232
3233  if (optimize_size)
3234    return 0;
3235  /* These values are log, not bytes.  */
3236  if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3237    return 2; /* 4 bytes */
3238  return 3;   /* 8 bytes */
3239}
3240
3241static int
3242rx_max_skip_for_label (rtx_insn *lab)
3243{
3244  int opsize;
3245  rtx_insn *op;
3246
3247  if (optimize_size)
3248    return 0;
3249
3250  if (lab == NULL)
3251    return 0;
3252
3253  op = lab;
3254  do
3255    {
3256      op = next_nonnote_nondebug_insn (op);
3257    }
3258  while (op && (LABEL_P (op)
3259		|| (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3260  if (!op)
3261    return 0;
3262
3263  opsize = get_attr_length (op);
3264  if (opsize >= 0 && opsize < 8)
3265    return opsize - 1;
3266  return 0;
3267}
3268
/* Compute the real length of the extending load-and-op instructions.
   INSN is the insn being measured and CURRENT_LENGTH the length computed
   so far; insns that are not one of the extending load-and-op patterns
   are returned unchanged.  */

int
rx_adjust_insn_length (rtx_insn *insn, int current_length)
{
  rtx extend, mem, offset;
  bool zero;   /* True for zero extension, false for sign extension.  */
  int factor;  /* Size in bytes of the extended memory operand.  */

  if (!INSN_P (insn))
    return current_length;

  /* Classify the insn: record whether it zero- or sign-extends and
     whether the memory operand is HImode (factor 2) or QImode (1).  */
  switch (INSN_CODE (insn))
    {
    default:
      return current_length;

    case CODE_FOR_plussi3_zero_extendhi:
    case CODE_FOR_andsi3_zero_extendhi:
    case CODE_FOR_iorsi3_zero_extendhi:
    case CODE_FOR_xorsi3_zero_extendhi:
    case CODE_FOR_divsi3_zero_extendhi:
    case CODE_FOR_udivsi3_zero_extendhi:
    case CODE_FOR_minussi3_zero_extendhi:
    case CODE_FOR_smaxsi3_zero_extendhi:
    case CODE_FOR_sminsi3_zero_extendhi:
    case CODE_FOR_multsi3_zero_extendhi:
    case CODE_FOR_comparesi3_zero_extendhi:
      zero = true;
      factor = 2;
      break;

    case CODE_FOR_plussi3_sign_extendhi:
    case CODE_FOR_andsi3_sign_extendhi:
    case CODE_FOR_iorsi3_sign_extendhi:
    case CODE_FOR_xorsi3_sign_extendhi:
    case CODE_FOR_divsi3_sign_extendhi:
    case CODE_FOR_udivsi3_sign_extendhi:
    case CODE_FOR_minussi3_sign_extendhi:
    case CODE_FOR_smaxsi3_sign_extendhi:
    case CODE_FOR_sminsi3_sign_extendhi:
    case CODE_FOR_multsi3_sign_extendhi:
    case CODE_FOR_comparesi3_sign_extendhi:
      zero = false;
      factor = 2;
      break;

    case CODE_FOR_plussi3_zero_extendqi:
    case CODE_FOR_andsi3_zero_extendqi:
    case CODE_FOR_iorsi3_zero_extendqi:
    case CODE_FOR_xorsi3_zero_extendqi:
    case CODE_FOR_divsi3_zero_extendqi:
    case CODE_FOR_udivsi3_zero_extendqi:
    case CODE_FOR_minussi3_zero_extendqi:
    case CODE_FOR_smaxsi3_zero_extendqi:
    case CODE_FOR_sminsi3_zero_extendqi:
    case CODE_FOR_multsi3_zero_extendqi:
    case CODE_FOR_comparesi3_zero_extendqi:
      zero = true;
      factor = 1;
      break;

    case CODE_FOR_plussi3_sign_extendqi:
    case CODE_FOR_andsi3_sign_extendqi:
    case CODE_FOR_iorsi3_sign_extendqi:
    case CODE_FOR_xorsi3_sign_extendqi:
    case CODE_FOR_divsi3_sign_extendqi:
    case CODE_FOR_udivsi3_sign_extendqi:
    case CODE_FOR_minussi3_sign_extendqi:
    case CODE_FOR_smaxsi3_sign_extendqi:
    case CODE_FOR_sminsi3_sign_extendqi:
    case CODE_FOR_multsi3_sign_extendqi:
    case CODE_FOR_comparesi3_sign_extendqi:
      zero = false;
      factor = 1;
      break;
    }

  /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))).  */
  extend = single_set (insn);
  gcc_assert (extend != NULL_RTX);

  /* The extend may be either the first or the second operand of OP.  */
  extend = SET_SRC (extend);
  if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
      || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
    extend = XEXP (extend, 0);
  else
    extend = XEXP (extend, 1);

  gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
	      || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));

  mem = XEXP (extend, 0);
  gcc_checking_assert (MEM_P (mem));
  /* A plain register address needs no displacement bytes.  The QImode
     zero-extending forms have a one byte shorter encoding throughout.  */
  if (REG_P (XEXP (mem, 0)))
    return (zero && factor == 1) ? 2 : 3;

  /* We are expecting: (MEM (PLUS (REG) (CONST_INT))).  */
  gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
  gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));

  offset = XEXP (XEXP (mem, 0), 1);
  gcc_checking_assert (GET_CODE (offset) == CONST_INT);

  /* Displacements that fit in one byte after scaling by the operand
     size use the shorter encoding; otherwise two displacement bytes
     are needed.  */
  if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
    return (zero && factor == 1) ? 3 : 4;

  return (zero && factor == 1) ? 4 : 5;
}
3378
/* Implement TARGET_NARROW_VOLATILE_BITFIELD.  Always access volatile
   bit-fields in their declared (narrow) mode.  */

static bool
rx_narrow_volatile_bitfield (void)
{
  return true;
}
3384
3385static bool
3386rx_ok_to_inline (tree caller, tree callee)
3387{
3388  /* Do not inline functions with local variables
3389     into a naked CALLER - naked function have no stack frame and
3390     locals need a frame in order to have somewhere to live.
3391
3392     Unfortunately we have no way to determine the presence of
3393     local variables in CALLEE, so we have to be cautious and
3394     assume that there might be some there.
3395
3396     We do allow inlining when CALLEE has the "inline" type
3397     modifier or the "always_inline" or "gnu_inline" attributes.  */
3398  return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3399    || DECL_DECLARED_INLINE_P (callee)
3400    || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3401    || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3402}
3403
/* Implement TARGET_LRA_P.  Use LRA instead of reload when the
   -menable-lra command line option has been given.  */

static bool
rx_enable_lra (void)
{
  return TARGET_ENABLE_LRA;
}
3409
3410
/* Initialize the GCC target structure: bind the RX implementations of
   the target hooks defined above (and elsewhere in this file) before
   expanding TARGET_INITIALIZER.  */

#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD		rx_narrow_volatile_bitfield

#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P			rx_ok_to_inline

#undef  TARGET_ASM_JUMP_ALIGN_MAX_SKIP
#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_ASM_LOOP_ALIGN_MAX_SKIP
#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP	rx_max_skip_for_label
#undef  TARGET_ASM_LABEL_ALIGN_MAX_SKIP
#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP			rx_max_skip_for_label

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE		rx_function_value

#undef  TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB		rx_return_in_msb

#undef  TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P		rx_in_small_data

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		rx_return_in_memory

#undef  TARGET_HAVE_SRODATA_SECTION
#define TARGET_HAVE_SRODATA_SECTION	true

#undef	TARGET_ASM_SELECT_RTX_SECTION
#define	TARGET_ASM_SELECT_RTX_SECTION	rx_select_rtx_section

#undef	TARGET_ASM_SELECT_SECTION
#define	TARGET_ASM_SELECT_SECTION	rx_select_section

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		rx_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL		rx_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		rx_expand_builtin

#undef  TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR		rx_elf_asm_constructor

#undef  TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR		rx_elf_asm_destructor

#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX		rx_struct_value_rtx

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		rx_attribute_table

#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START			rx_file_start

#undef  TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P		rx_is_ms_bitfield_layout

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P		rx_is_legitimate_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P		rx_mode_dependent_address_p

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS	rx_allocate_stack_slots_for_args

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE 		rx_output_function_prologue

#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P 	rx_func_attr_inlinable

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL		rx_function_ok_for_sibcall

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG     		rx_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE     	rx_function_arg_advance

#undef	TARGET_FUNCTION_ARG_BOUNDARY
#define	TARGET_FUNCTION_ARG_BOUNDARY		rx_function_arg_boundary

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION		rx_set_current_function

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER			rx_assemble_integer

#undef  TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P	hook_bool_mode_const_rtx_true

#undef  TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET		32

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST			rx_address_cost

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE			rx_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	rx_conditional_register_usage

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE		rx_trampoline_template

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT			rx_trampoline_init

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND			rx_print_operand

#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS		rx_print_operand_address

#undef  TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE		rx_cc_modes_compatible

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST			rx_memory_move_cost

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE			rx_option_override

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE		rx_promote_function_mode

#undef  TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE	rx_override_options_after_change

#undef  TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM			CC_REG

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P		rx_is_legitimate_constant

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS		rx_legitimize_address

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN 		rx_warn_func_return

#undef  TARGET_LRA_P
#define TARGET_LRA_P 				rx_enable_lra

/* The one and only definition of the target hook vector.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collection roots generated for this file by gengtype.  */
#include "gt-rx.h"
3567