/* Operations with affine combinations of trees.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hashtab.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "wide-int-print.h"

/* Extends CST as appropriate for the affine combination COMB.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
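
/* Editorial note: the extension keeps offsets and coefficients reduced
   modulo 2^precision of COMB->type while storing them sign-extended in
   the wider widest_int, e.g. for a 32-bit type

     0xffffffff  ->  -1
     0x100000000 ->   0

   so values that are equal modulo the precision compare equal.  */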

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}
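
/* Editorial usage sketch, assuming X is some tree of integral TYPE
   (both are stand-ins, not names from this file):

     aff_tree c, tmp;
     aff_combination_elt (&c, type, x);      // c = 1 * x
     aff_combination_const (&tmp, type, 5);  // tmp = 5
     aff_combination_add (&c, &tmp);         // c = x + 5

   aff_combination_add is defined below.  */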

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
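
/* Editorial example: scaling { 2 + 3 * x } by 4 yields { 8 + 12 * x }.
   In an 8-bit type, scaling { 64 * x } by 4 gives coefficient
   256 mod 2^8 == 0, so the element is dropped by the zero-coefficient
   path above.  */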

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}
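
/* Editorial example: adding x * -3 to a combination that already holds
   x * 3 cancels the element (the new_coef == 0 path above), while adding
   y * 2 when all MAX_AFF_ELTS slots are occupied folds "y * 2" into
   COMB->rest as an ordinary tree instead.  */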

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
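
/* Editorial note: narrowing (and equal precision) is handled in place by
   re-extending the offset and coefficients; widening instead round-trips
   through aff_combination_to_tree and tree_to_aff_combination, since a
   coefficient that wrapped in the narrow type has no unique image in the
   wider one.  */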

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  aff_combination_add_cst (comb,
				   wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
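
/* Editorial example: for EXPR = a + 4 * b - 7 in a 32-bit type the
   result is

     { offset = -7, elts = { a * 1, b * 4 } }

   whereas an expression opaque to the switch above, say a * b, becomes
   the single element { (a * b) * 1 }.  */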

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
			  comb);
}
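
/* Editorial note: this is the inverse of tree_to_aff_combination only up
   to equivalence; a round trip may return a reassociated expression,
   e.g. (a - 7) + 4 * b can come back as a + b * 4 - 7.  */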

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in which case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
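
/* Editorial example: multiplying C1 = { 1 + x } by C2 = { 2 + y }
   distributes into R = { 2 + 2 * x + 1 * y + 1 * (x * y) }; the x * y
   term is built as a MULT_EXPR tree by aff_combination_add_product,
   because the representation itself is only affine.  */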

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
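
/* Editorial example: given a1 = a0 + a0 and COMB = { 1 * a1 }, the loop
   above queues -1 * a1 (in CURRE) together with the expansion of a1
   scaled by its coefficient, so COMB becomes { 2 * a0 }.  */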

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
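
/* Editorial usage sketch: callers keep one cache across a series of
   expansions and release it afterwards:

     hash_map<tree, name_expansion *> *cache = NULL;
     aff_tree c;
     tree_to_aff_combination_expand (expr, type, &c, &cache);
     ...
     free_affine_expand_cache (&cache);

   free_affine_expand_cache is defined below.  */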

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and *MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, CST is stored to *MULT and *MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
			      bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
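
/* Editorial example: VAL = { 8 + 4 * x } with DIV = { 2 + 1 * x }
   succeeds with *MULT = 4; VAL = { 8 + 4 * x } with DIV = { 2 + 3 * x }
   fails, because the offsets demand the factor 4 while the x coefficient
   4 is not that multiple of 3.  */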

/* Prints the affine combination VAL to FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine combination VAL to the standard error, used for
   debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes the address of the reference REF and stores it in ADDR.  The
   size of the accessed location is stored to SIZE.  Returns the ultimate
   containing object to which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
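
/* Editorial example: for REF = a.f, with field F at byte offset 8 in A
   and no variable offset, ADDR becomes { &a + 8 } in sizetype and *SIZE
   is the byte size of F, rounded up from its bit size.  */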

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
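
/* Editorial example: with SIZE1 = 4 and SIZE2 = 4 the regions are known
   disjoint iff DIFF->offset <= -4 or DIFF->offset >= 4; an offset of 2
   returns false, since bytes [2, 5] intersect [0, 3].  */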