Commit | Line | Data |
---|---|---|
9bf86ebb PR |
1 | /* Language-dependent node constructors for parse phase of GNU compiler. |
2 | Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc. | |
3 | Hacked by Michael Tiemann (tiemann@cygnus.com) | |
4 | ||
5 | This file is part of GNU CC. | |
6 | ||
7 | GNU CC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GNU CC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GNU CC; see the file COPYING. If not, write to | |
19 | the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ | |
20 | ||
21 | #include "config.h" | |
22 | #include <stdio.h> | |
23 | #include "obstack.h" | |
24 | #include "tree.h" | |
25 | #include "cp-tree.h" | |
26 | #include "flags.h" | |
27 | ||
28 | #define CEIL(x,y) (((x) + (y) - 1) / (y)) | |
29 | ||
/* Return nonzero if REF is an lvalue valid for this language.
   Lvalues can be assigned, unless they have TREE_READONLY.
   Lvalues can have their address taken, unless they have DECL_REGISTER.  */

int
lvalue_p (ref)
     tree ref;
{
  register enum tree_code code = TREE_CODE (ref);

  /* language_lvalue_valid gates everything: if it rejects REF we
     return 0 regardless of the tree code.  */
  if (language_lvalue_valid (ref))
    switch (code)
      {
	/* preincrements and predecrements are valid lvals, provided
	   what they refer to are valid lvals.  */
      case PREINCREMENT_EXPR:
      case PREDECREMENT_EXPR:
      case COMPONENT_REF:
	return lvalue_p (TREE_OPERAND (ref, 0));

      case STRING_CST:
	return 1;

      case VAR_DECL:
	/* A readonly, non-static member variable (in-aggregate const)
	   is not assignable, hence not an lvalue here.  */
	if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
	    && DECL_LANG_SPECIFIC (ref)
	    && DECL_IN_AGGR_P (ref))
	  return 0;
	/* Fall through: any other VAR_DECL is treated like the
	   reference codes below.  */
      case INDIRECT_REF:
      case ARRAY_REF:
      case PARM_DECL:
      case RESULT_DECL:
      case ERROR_MARK:
	/* Functions and methods are not lvalues, everything else of
	   these codes is.  */
	if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
	    && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
	  return 1;
	break;

      case TARGET_EXPR:
      case WITH_CLEANUP_EXPR:
	return 1;

      case CALL_EXPR:
	/* Calls returning a reference, or returning an object that
	   must live in memory, act as lvalues.  */
	if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE
	    /* unary_complex_lvalue knows how to deal with this case.  */
	    || TREE_ADDRESSABLE (TREE_TYPE (ref)))
	  return 1;
	break;

	/* A currently unresolved scope ref.  */
      case SCOPE_REF:
	/* Should have been resolved before now; presumably
	   my_friendly_abort does not return.  */
	my_friendly_abort (103);
      case OFFSET_REF:
	if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
	  return 1;
	/* NOTE(review): the `else' below binds to the inner `if', so a
	   non-VAR_DECL operand falls to the break (returning 0), while
	   a VAR_DECL returns 0 only when it is a readonly in-aggregate
	   member.  Confirm this binding is the intended behavior.  */
	if (TREE_CODE (TREE_OPERAND (ref, 1)) == VAR_DECL)
	  if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
	      && DECL_LANG_SPECIFIC (ref)
	      && DECL_IN_AGGR_P (ref))
	    return 0;
	  else
	    return 1;
	break;

      case ADDR_EXPR:
	/* ANSI C++ June 5 1992 WP 5.4.14.  The result of a cast to a
	   reference is an lvalue.  */
	if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
	  return 1;
	break;
      }
  return 0;
}
103 | ||
104 | /* Return nonzero if REF is an lvalue valid for this language; | |
105 | otherwise, print an error message and return zero. */ | |
106 | ||
107 | int | |
108 | lvalue_or_else (ref, string) | |
109 | tree ref; | |
110 | char *string; | |
111 | { | |
112 | int win = lvalue_p (ref); | |
113 | if (! win) | |
114 | error ("invalid lvalue in %s", string); | |
115 | return win; | |
116 | } | |
117 | ||
/* INIT is a CALL_EXPR which needs info about its target.
   TYPE is the type that this initialization should appear to have.

   Build an encapsulation of the initialization to perform
   and return it so that it can be processed by language-independent
   and language-specific expression expanders.

   If WITH_CLEANUP_P is nonzero, we build a cleanup for this expression.
   Otherwise, cleanups are not built here.  For example, when building
   an initialization for a stack slot, since the called function handles
   the cleanup, we would not want to do it here.  */
tree
build_cplus_new (type, init, with_cleanup_p)
     tree type;
     tree init;
     int with_cleanup_p;
{
  /* Anonymous object that the constructor call will build into.  */
  tree slot = build (VAR_DECL, type);
  /* Re-express INIT as a NEW_EXPR carrying the callee (operand 0),
     the argument list (operand 1), and the target SLOT.  */
  tree rval = build (NEW_EXPR, type,
		     TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
  TREE_SIDE_EFFECTS (rval) = 1;
  TREE_ADDRESSABLE (rval) = 1;
  /* Wrap in a TARGET_EXPR so expanders know where to construct.  */
  rval = build (TARGET_EXPR, type, slot, rval, 0);
  TREE_SIDE_EFFECTS (rval) = 1;
  TREE_ADDRESSABLE (rval) = 1;

  if (with_cleanup_p && TYPE_NEEDS_DESTRUCTOR (type))
    {
      /* Attach a cleanup that destroys SLOT (integer_two_node and
	 LOOKUP_DESTRUCTOR select the destructor-call form of
	 build_delete).  */
      rval = build (WITH_CLEANUP_EXPR, type, rval, 0,
		    build_delete (TYPE_POINTER_TO (type),
				  build_unary_op (ADDR_EXPR, slot, 0),
				  integer_two_node,
				  LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0, 0));
      TREE_SIDE_EFFECTS (rval) = 1;
    }
  return rval;
}
155 | ||
/* Recursively search EXP for CALL_EXPRs that need cleanups and replace
   these CALL_EXPRs with tree nodes that will perform the cleanups.
   Only the top node and a chain of conversion wrappers (NOP_EXPR,
   CONVERT_EXPR, NON_LVALUE_EXPR) are inspected; returns EXP, possibly
   with one operand rewritten in place.  */

tree
break_out_cleanups (exp)
     tree exp;
{
  tree tmp = exp;

  /* A bare call needing a destructor: wrap it and return the wrapper.  */
  if (TREE_CODE (tmp) == CALL_EXPR
      && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
    return build_cplus_new (TREE_TYPE (tmp), tmp, 1);

  /* Otherwise descend through conversions looking for such a call;
     when found, splice the wrapper in under the conversion.  */
  while (TREE_CODE (tmp) == NOP_EXPR
	 || TREE_CODE (tmp) == CONVERT_EXPR
	 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
	  && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
	{
	  TREE_OPERAND (tmp, 0)
	    = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
			       TREE_OPERAND (tmp, 0), 1);
	  break;
	}
      else
	tmp = TREE_OPERAND (tmp, 0);
    }
  return exp;
}
186 | ||
/* Recursively perform a preorder search EXP for CALL_EXPRs, making
   copies where they are found.  Returns a deep copy all nodes transitively
   containing CALL_EXPRs.  Nodes are only copied when something beneath
   them changed, so unchanged subtrees stay shared with the original.  */

tree
break_out_calls (exp)
     tree exp;
{
  register tree t1, t2;
  register enum tree_code code;
  register int changed = 0;
  register int i;

  if (exp == NULL_TREE)
    return exp;

  code = TREE_CODE (exp);

  if (code == CALL_EXPR)
    return copy_node (exp);

  /* Don't try and defeat a save_expr, as it should only be done once.  */
  if (code == SAVE_EXPR)
    return exp;

  switch (TREE_CODE_CLASS (code))
    {
    default:
      abort ();

    case 'c':			/* a constant */
    case 't':			/* a type node */
    case 'x':			/* something random, like an identifier or an ERROR_MARK.  */
      return exp;

    case 'd':			/* A decl node */
      /* Only the initializer of a decl can hide calls; copy the decl
	 iff the initializer changed.  */
      t1 = break_out_calls (DECL_INITIAL (exp));
      if (t1 != DECL_INITIAL (exp))
	{
	  exp = copy_node (exp);
	  DECL_INITIAL (exp) = t1;
	}
      return exp;

    case 'b':			/* A block node */
      {
	/* Don't know how to handle these correctly yet.   Must do a
	   break_out_calls on all DECL_INITIAL values for local variables,
	   and also break_out_calls on all sub-blocks and sub-statements.  */
	abort ();
      }
      return exp;

    case 'e':			/* an expression */
    case 'r':			/* a reference */
    case 's':			/* an expression with side effects */
      /* Walk every operand; copy EXP lazily on the first change.  */
      for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
	{
	  t1 = break_out_calls (TREE_OPERAND (exp, i));
	  if (t1 != TREE_OPERAND (exp, i))
	    {
	      if (changed++ == 0)
		exp = copy_node (exp);
	      TREE_OPERAND (exp, i) = t1;
	    }
	}
      return exp;

    case '<':			/* a comparison expression */
    case '2':			/* a binary arithmetic expression */
      t2 = break_out_calls (TREE_OPERAND (exp, 1));
      if (t2 != TREE_OPERAND (exp, 1))
	changed = 1;
      /* Fall through to handle operand 0; T2 is only consulted below
	 when the code really is binary (length != 1).  */
    case '1':			/* a unary arithmetic expression */
      t1 = break_out_calls (TREE_OPERAND (exp, 0));
      if (t1 != TREE_OPERAND (exp, 0))
	changed = 1;
      if (changed)
	{
	  if (tree_code_length[(int) code] == 1)
	    return build1 (code, TREE_TYPE (exp), t1);
	  else
	    return build (code, TREE_TYPE (exp), t1, t2);
	}
      return exp;
    }

}
275 | \f | |
276 | extern struct obstack *current_obstack; | |
277 | extern struct obstack permanent_obstack, class_obstack; | |
278 | extern struct obstack *saveable_obstack; | |
279 | ||
280 | /* Here is how primitive or already-canonicalized types' hash | |
281 | codes are made. MUST BE CONSISTENT WITH tree.c !!! */ | |
282 | #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777) | |
283 | ||
/* Construct, lay out and return the type of methods belonging to class
   BASETYPE and whose arguments are described by ARGTYPES and whose values
   are described by RETTYPE.  If each type exists already, reuse it.  */
tree
build_cplus_method_type (basetype, rettype, argtypes)
     tree basetype, rettype, argtypes;
{
  register tree t;
  tree ptype = build_pointer_type (basetype);
  int hashcode;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
#if 0
  /* it is wrong to flag the object the pointer points to as readonly
     when flag_this_is_variable is 0.  */
  ptype = build_type_variant (ptype, flag_this_is_variable <= 0, 0);
#else
  ptype = build_type_variant (ptype, 0, 0);
#endif
  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */

  TYPE_ARG_TYPES (t) = tree_cons (NULL, ptype, argtypes);

  /* If we already have such a type, use the old one and free this one.
     Note that it also frees up the above cons cell if found.  */
  hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
  t = type_hash_canon (hashcode, t);

  /* Canonical node may be fresh and not yet laid out.  */
  if (TYPE_SIZE (t) == 0)
    layout_type (t);

  return t;
}
322 | ||
/* Like build_cplus_method_type, but for a static member function:
   a FUNCTION_TYPE that still records BASETYPE as its method basetype
   and takes no hidden `this' argument.  Reuses an existing node via
   the type hash table when possible.  */
tree
build_cplus_staticfn_type (basetype, rettype, argtypes)
     tree basetype, rettype, argtypes;
{
  register tree t;
  int hashcode;

  /* Make a node of the sort we want.  */
  t = make_node (FUNCTION_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;

  /* Unlike a METHOD_TYPE, no hidden "this" argument is prepended;
     the argument list is used as given.  */

  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one and free this one.
     (No extra cons cell was built here, so only T itself is freed.)  */
  hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
  t = type_hash_canon (hashcode, t);

  if (TYPE_SIZE (t) == 0)
    layout_type (t);

  return t;
}
351 | ||
352 | tree | |
353 | build_cplus_array_type (elt_type, index_type) | |
354 | tree elt_type; | |
355 | tree index_type; | |
356 | { | |
357 | register struct obstack *ambient_obstack = current_obstack; | |
358 | register struct obstack *ambient_saveable_obstack = saveable_obstack; | |
359 | tree t; | |
360 | ||
361 | /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent, | |
362 | make this permanent too. */ | |
363 | if (TREE_PERMANENT (elt_type) | |
364 | && (index_type == 0 || TREE_PERMANENT (index_type))) | |
365 | { | |
366 | current_obstack = &permanent_obstack; | |
367 | saveable_obstack = &permanent_obstack; | |
368 | } | |
369 | ||
370 | t = build_array_type (elt_type, index_type); | |
371 | ||
372 | /* Push these needs up so that initialization takes place | |
373 | more easily. */ | |
374 | TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type)); | |
375 | TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type)); | |
376 | current_obstack = ambient_obstack; | |
377 | saveable_obstack = ambient_saveable_obstack; | |
378 | return t; | |
379 | } | |
380 | \f | |
/* Add OFFSET to all base types of T.

   OFFSET, which is a type offset, is number of bytes.

   Note that we don't have to worry about having two paths to the
   same base type, since this type owns its association list.  */
void
propagate_binfo_offsets (binfo, offset)
     tree binfo;
     tree offset;
{
  tree binfos = BINFO_BASETYPES (binfo);
  int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  for (i = 0; i < n_baselinks; /* note increment is done in the loop.  */)
    {
      tree base_binfo = TREE_VEC_ELT (binfos, i);

      /* Virtual bases are laid out separately; skip them here.  */
      if (TREE_VIA_VIRTUAL (base_binfo))
	i += 1;
      else
	{
	  int j;
	  tree base_binfos = BINFO_BASETYPES (base_binfo);
	  tree delta;

	  /* Find the next non-virtual base after this one, and compute
	     DELTA, the distance between their offsets.  If none is
	     found, J ends at N_BASELINKS and DELTA stays unset — but
	     then the guard at the bottom never reads it.  */
	  for (j = i+1; j < n_baselinks; j++)
	    if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
	      {
		/* The next basetype offset must take into account the space
		   between the classes, not just the size of each class.  */
		delta = size_binop (MINUS_EXPR,
				    BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
				    BINFO_OFFSET (base_binfo));
		break;
	      }

#if 0
	  if (BINFO_OFFSET_ZEROP (base_binfo))
	    BINFO_OFFSET (base_binfo) = offset;
	  else
	    BINFO_OFFSET (base_binfo)
	      = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
#else
	  BINFO_OFFSET (base_binfo) = offset;
#endif
	  if (base_binfos)
	    {
	      int k;
	      tree chain = NULL_TREE;

	      /* Now unshare the structure beneath BASE_BINFO, so that
		 updating its sub-binfo offsets cannot disturb binfos
		 shared with other types.  */
	      for (k = TREE_VEC_LENGTH (base_binfos)-1;
		   k >= 0; k--)
		{
		  tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
		  if (! TREE_VIA_VIRTUAL (base_base_binfo))
		    TREE_VEC_ELT (base_binfos, k)
		      = make_binfo (BINFO_OFFSET (base_base_binfo),
				    BINFO_TYPE (base_base_binfo),
				    BINFO_VTABLE (base_base_binfo),
				    BINFO_VIRTUALS (base_base_binfo),
				    chain);
		  chain = TREE_VEC_ELT (base_binfos, k);
		  TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
		  TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
		}
	      /* Now propagate the offset to the base types.  */
	      propagate_binfo_offsets (base_binfo, offset);
	    }

	  /* Go to our next class that counts for offset propagation.  */
	  i = j;
	  if (i < n_baselinks)
	    offset = size_binop (PLUS_EXPR, offset, delta);
	}
    }
}
459 | ||
/* Compute the actual offsets that our virtual base classes
   will have *for this type*.  This must be performed after
   the fields are laid out, since virtual baseclasses must
   lay down at the end of the record.

   Returns the maximum number of virtual functions any of the virtual
   baseclasses provide.  */
int
layout_vbasetypes (rec, max)
     tree rec;
     int max;
{
  /* Get all the virtual base types that this type uses.
     The TREE_VALUE slot holds the virtual baseclass type.  */
  tree vbase_types = get_vbase_types (rec);

#ifdef STRUCTURE_SIZE_BOUNDARY
  unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
#else
  unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
#endif

  /* Record size so far is CONST_SIZE + VAR_SIZE bits,
     where CONST_SIZE is an integer
     and VAR_SIZE is a tree expression.
     If VAR_SIZE is null, the size is just CONST_SIZE.
     Naturally we try to avoid using VAR_SIZE.  */
  register unsigned const_size = 0;
  register tree var_size = 0;
  int nonvirtual_const_size;
  tree nonvirtual_var_size;

  CLASSTYPE_VBASECLASSES (rec) = vbase_types;

  /* Start from the size of the non-virtual part already laid out.  */
  if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
    const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
  else
    var_size = TYPE_SIZE (rec);

  nonvirtual_const_size = const_size;
  nonvirtual_var_size = var_size;

  /* Append each virtual base at the current end of the record,
     recording its byte offset in its binfo.  */
  while (vbase_types)
    {
      tree basetype = BINFO_TYPE (vbase_types);
      tree offset;

      if (const_size == 0)
	offset = integer_zero_node;
      else
	/* Round bits up to whole bytes.  */
	offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);

      if (CLASSTYPE_VSIZE (basetype) > max)
	max = CLASSTYPE_VSIZE (basetype);
      BINFO_OFFSET (vbase_types) = offset;

      if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
	/* Add only the part of the base not itself made of virtual
	   bases (those are shared, laid out once for REC).  */
	const_size += MAX (record_align,
			   TREE_INT_CST_LOW (TYPE_SIZE (basetype))
			   - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
      else if (var_size == 0)
	var_size = TYPE_SIZE (basetype);
      else
	var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));

      vbase_types = TREE_CHAIN (vbase_types);
    }

  if (const_size != nonvirtual_const_size)
    {
      CLASSTYPE_VBASE_SIZE (rec)
	= size_int (const_size - nonvirtual_const_size);
      TYPE_SIZE (rec) = size_int (const_size);
    }

  /* Now propagate offset information throughout the lattice
     under the vbase type.  */
  for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
       vbase_types = TREE_CHAIN (vbase_types))
    {
      tree base_binfos = BINFO_BASETYPES (vbase_types);

      if (base_binfos)
	{
	  tree chain = NULL_TREE;
	  int j;
	  /* Now unshare the structure beneath BASE_BINFO, so the
	     offset updates below cannot affect binfos shared with
	     other types.  */

	  for (j = TREE_VEC_LENGTH (base_binfos)-1;
	       j >= 0; j--)
	    {
	      tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
	      if (! TREE_VIA_VIRTUAL (base_base_binfo))
		TREE_VEC_ELT (base_binfos, j)
		  = make_binfo (BINFO_OFFSET (base_base_binfo),
				BINFO_TYPE (base_base_binfo),
				BINFO_VTABLE (base_base_binfo),
				BINFO_VIRTUALS (base_base_binfo),
				chain);
	      chain = TREE_VEC_ELT (base_binfos, j);
	      TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
	      TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
	    }

	  propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
	}
    }

  return max;
}
570 | ||
/* Lay out the base types of a record type, REC.
   Tentatively set the size and alignment of REC
   according to the base types alone.

   Offsets for immediate nonvirtual baseclasses are also computed here.

   Returns list of virtual base classes in a FIELD_DECL chain.  */
tree
layout_basetypes (rec, binfos)
     tree rec, binfos;
{
  /* Chain to hold all the new FIELD_DECLs which point at virtual
     base classes.  */
  tree vbase_decls = NULL_TREE;

#ifdef STRUCTURE_SIZE_BOUNDARY
  unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
#else
  unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
#endif

  /* Record size so far is CONST_SIZE + VAR_SIZE bits,
     where CONST_SIZE is an integer
     and VAR_SIZE is a tree expression.
     If VAR_SIZE is null, the size is just CONST_SIZE.
     Naturally we try to avoid using VAR_SIZE.
     NOTE(review): VAR_SIZE is never written or read below — apparently
     vestigial here; confirm before removing.  */
  register unsigned const_size = 0;
  register tree var_size = 0;
  int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  /* Handle basetypes almost like fields, but record their
     offsets differently.  */

  for (i = 0; i < n_baseclasses; i++)
    {
      int inc, desired_align, int_vbase_size;
      register tree base_binfo = TREE_VEC_ELT (binfos, i);
      register tree basetype = BINFO_TYPE (base_binfo);
      tree decl, offset;

      /* Incomplete base types are skipped entirely.  */
      if (TYPE_SIZE (basetype) == 0)
	{
#if 0
	  /* This error is now reported in xref_tag, thus giving better
	     location information.  */
	  error_with_aggr_type (base_binfo,
				"base class `%s' has incomplete type");

	  TREE_VIA_PUBLIC (base_binfo) = 1;
	  TREE_VIA_PROTECTED (base_binfo) = 0;
	  TREE_VIA_VIRTUAL (base_binfo) = 0;

	  /* Should handle this better so that

	     class A;
	     class B: private A { virtual void F(); };

	     does not dump core when compiled.  */
	  my_friendly_abort (121);
#endif
	  continue;
	}

      /* All basetypes are recorded in the association list of the
	 derived type.  */

      if (TREE_VIA_VIRTUAL (base_binfo))
	{
	  int j;
	  /* NOTE(review): buffer is sized with sizeof (VBASE_NAME) but
	     filled via VBASE_NAME_FORMAT below — assumes the format
	     never expands longer than VBASE_NAME; confirm.  */
	  char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
				       + sizeof (VBASE_NAME) + 1);

	  /* The offset for a virtual base class is only used in computing
	     virtual function tables and for initializing virtual base
	     pointers.  It is built once `get_vbase_types' is called.  */

	  /* If this basetype can come from another vbase pointer
	     without an additional indirection, we will share
	     that pointer.  If an indirection is involved, we
	     make our own pointer.  */
	  for (j = 0; j < n_baseclasses; j++)
	    {
	      tree other_base_binfo = TREE_VEC_ELT (binfos, j);
	      if (! TREE_VIA_VIRTUAL (other_base_binfo)
		  && binfo_member (basetype,
				   CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
		goto got_it;
	    }
	  sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
	  decl = build_lang_decl (FIELD_DECL, get_identifier (name),
				  build_pointer_type (basetype));
	  /* If you change any of the below, take a look at all the
	     other VFIELD_BASEs and VTABLE_BASEs in the code, and change
	     them too.  */
	  DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
	  DECL_VIRTUAL_P (decl) = 1;
	  DECL_FIELD_CONTEXT (decl) = rec;
	  DECL_CLASS_CONTEXT (decl) = rec;
	  DECL_FCONTEXT (decl) = basetype;
	  DECL_FIELD_SIZE (decl) = 0;
	  DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
	  TREE_CHAIN (decl) = vbase_decls;
	  BINFO_VPTR_FIELD (base_binfo) = decl;
	  vbase_decls = decl;

	  /* Warn about a virtual base whose destructor is non-virtual
	     (method slot 0 is assumed to be the destructor here).  */
	  if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
	      && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
	    {
	      warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
				 "destructor `%s' non-virtual");
	      warning ("in inheritance relationship `%s: virtual %s'",
		       TYPE_NAME_STRING (rec),
		       TYPE_NAME_STRING (basetype));
	    }
	got_it:
	  /* The space this decl occupies has already been accounted for.  */
	  continue;
	}

      if (const_size == 0)
	offset = integer_zero_node;
      else
	{
	  /* Give each base type the alignment it wants.  */
	  const_size = CEIL (const_size, TYPE_ALIGN (basetype))
	    * TYPE_ALIGN (basetype);
	  offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);

	  if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
	      && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
	    {
	      warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
				 "destructor `%s' non-virtual");
	      warning ("in inheritance relationship `%s:%s %s'",
		       TYPE_NAME_STRING (rec),
		       TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
		       TYPE_NAME_STRING (basetype));
	    }
	}
      BINFO_OFFSET (base_binfo) = offset;
      if (CLASSTYPE_VSIZE (basetype))
	{
	  BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
	  BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
	}
      /* Thread this binfo onto REC's binfo chain.  */
      TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
      TYPE_BINFO (rec) = base_binfo;

      /* Add only the amount of storage not present in
	 the virtual baseclasses.  */

      int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
      if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
	{
	  inc = MAX (record_align,
		     (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
		      - int_vbase_size));

	  /* Record must have at least as much alignment as any field.  */
	  desired_align = TYPE_ALIGN (basetype);
	  record_align = MAX (record_align, desired_align);

	  const_size += inc;
	}
    }

  if (const_size)
    CLASSTYPE_SIZE (rec) = size_int (const_size);
  else
    CLASSTYPE_SIZE (rec) = integer_zero_node;
  CLASSTYPE_ALIGN (rec) = record_align;

  return vbase_decls;
}
745 | \f | |
746 | /* Hashing of lists so that we don't make duplicates. | |
747 | The entry point is `list_hash_canon'. */ | |
748 | ||
749 | /* Each hash table slot is a bucket containing a chain | |
750 | of these structures. */ | |
751 | ||
752 | struct list_hash | |
753 | { | |
754 | struct list_hash *next; /* Next structure in the bucket. */ | |
755 | int hashcode; /* Hash code of this list. */ | |
756 | tree list; /* The list recorded here. */ | |
757 | }; | |
758 | ||
759 | /* Now here is the hash table. When recording a list, it is added | |
760 | to the slot whose index is the hash code mod the table size. | |
761 | Note that the hash table is used for several kinds of lists. | |
762 | While all these live in the same table, they are completely independent, | |
763 | and the hash code is computed differently for each of these. */ | |
764 | ||
765 | #define TYPE_HASH_SIZE 59 | |
766 | struct list_hash *list_hash_table[TYPE_HASH_SIZE]; | |
767 | ||
768 | /* Compute a hash code for a list (chain of TREE_LIST nodes | |
769 | with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the | |
770 | TREE_COMMON slots), by adding the hash codes of the individual entries. */ | |
771 | ||
772 | int | |
773 | list_hash (list) | |
774 | tree list; | |
775 | { | |
776 | register int hashcode = 0; | |
777 | ||
778 | if (TREE_CHAIN (list)) | |
779 | hashcode += TYPE_HASH (TREE_CHAIN (list)); | |
780 | ||
781 | if (TREE_VALUE (list)) | |
782 | hashcode += TYPE_HASH (TREE_VALUE (list)); | |
783 | else | |
784 | hashcode += 1007; | |
785 | if (TREE_PURPOSE (list)) | |
786 | hashcode += TYPE_HASH (TREE_PURPOSE (list)); | |
787 | else | |
788 | hashcode += 1009; | |
789 | return hashcode; | |
790 | } | |
791 | ||
/* Look in the type hash table for a list isomorphic to LIST,
   using HASHCODE to pick the bucket.
   If one is found, return it.  Otherwise return 0.  */

tree
list_hash_lookup (hashcode, list)
     int hashcode;
     tree list;
{
  register struct list_hash *h;
  /* Scan the bucket; two lists match only if the hash code and every
     compared slot (access bits, purpose, value, chain) are identical.  */
  for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
    if (h->hashcode == hashcode
	&& TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
	&& TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
	&& TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
	&& TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
	&& TREE_VALUE (h->list) == TREE_VALUE (list)
	&& TREE_CHAIN (h->list) == TREE_CHAIN (list))
      {
	/* Equal slots should imply equal types; check it.  */
	my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
	return h->list;
      }
  return 0;
}
815 | ||
816 | /* Add an entry to the list-hash-table | |
817 | for a list TYPE whose hash code is HASHCODE. */ | |
818 | ||
819 | void | |
820 | list_hash_add (hashcode, list) | |
821 | int hashcode; | |
822 | tree list; | |
823 | { | |
824 | register struct list_hash *h; | |
825 | ||
826 | h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash)); | |
827 | h->hashcode = hashcode; | |
828 | h->list = list; | |
829 | h->next = list_hash_table[hashcode % TYPE_HASH_SIZE]; | |
830 | list_hash_table[hashcode % TYPE_HASH_SIZE] = h; | |
831 | } | |
832 | ||
833 | /* Given TYPE, and HASHCODE its hash code, return the canonical | |
834 | object for an identical list if one already exists. | |
835 | Otherwise, return TYPE, and record it as the canonical object | |
836 | if it is a permanent object. | |
837 | ||
838 | To use this function, first create a list of the sort you want. | |
839 | Then compute its hash code from the fields of the list that | |
840 | make it different from other similar lists. | |
841 | Then call this function and use the value. | |
842 | This function frees the list you pass in if it is a duplicate. */ | |
843 | ||
844 | /* Set to 1 to debug without canonicalization. Never set by program. */ | |
845 | int debug_no_list_hash = 0; | |
846 | ||
847 | tree | |
848 | list_hash_canon (hashcode, list) | |
849 | int hashcode; | |
850 | tree list; | |
851 | { | |
852 | tree t1; | |
853 | ||
854 | if (debug_no_list_hash) | |
855 | return list; | |
856 | ||
857 | t1 = list_hash_lookup (hashcode, list); | |
858 | if (t1 != 0) | |
859 | { | |
860 | obstack_free (&class_obstack, list); | |
861 | return t1; | |
862 | } | |
863 | ||
864 | /* If this is a new list, record it for later reuse. */ | |
865 | list_hash_add (hashcode, list); | |
866 | ||
867 | return list; | |
868 | } | |
869 | ||
/* Build a hashed TREE_LIST node on the class obstack with the given
   access bits (VIA_PUBLIC / VIA_VIRTUAL / VIA_PROTECTED), PURPOSE,
   VALUE, and CHAIN, canonicalizing it through the list hash table so
   identical lists are shared.  */
tree
hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
     int via_public, via_virtual, via_protected;
     tree purpose, value, chain;
{
  struct obstack *ambient_obstack = current_obstack;
  tree t;
  int hashcode;

  /* Allocate on the class obstack so list_hash_canon can free a
     duplicate; restore the ambient obstack before returning.  */
  current_obstack = &class_obstack;
  t = tree_cons (purpose, value, chain);
  TREE_VIA_PUBLIC (t) = via_public;
  TREE_VIA_PROTECTED (t) = via_protected;
  TREE_VIA_VIRTUAL (t) = via_virtual;
  /* Hash after the access bits are set, since lookup compares them.  */
  hashcode = list_hash (t);
  t = list_hash_canon (hashcode, t);
  current_obstack = ambient_obstack;
  return t;
}
889 | ||
890 | /* Constructor for hashed lists. */ | |
891 | tree | |
892 | hash_tree_chain (value, chain) | |
893 | tree value, chain; | |
894 | { | |
895 | struct obstack *ambient_obstack = current_obstack; | |
896 | tree t; | |
897 | int hashcode; | |
898 | ||
899 | current_obstack = &class_obstack; | |
900 | t = tree_cons (NULL_TREE, value, chain); | |
901 | hashcode = list_hash (t); | |
902 | t = list_hash_canon (hashcode, t); | |
903 | current_obstack = ambient_obstack; | |
904 | return t; | |
905 | } | |
906 | ||
907 | /* Similar, but used for concatenating two lists. */ | |
908 | tree | |
909 | hash_chainon (list1, list2) | |
910 | tree list1, list2; | |
911 | { | |
912 | if (list2 == 0) | |
913 | return list1; | |
914 | if (list1 == 0) | |
915 | return list2; | |
916 | if (TREE_CHAIN (list1) == NULL_TREE) | |
917 | return hash_tree_chain (TREE_VALUE (list1), list2); | |
918 | return hash_tree_chain (TREE_VALUE (list1), | |
919 | hash_chainon (TREE_CHAIN (list1), list2)); | |
920 | } | |
921 | ||
/* Return a singleton declaration list `(NULL_TREE . VALUE)' for VALUE,
   reusing a list cached on VALUE itself (IDENTIFIER_AS_LIST, or the
   class's CLASSTYPE_ID_AS_LIST / CLASSTYPE_AS_LIST) when one exists;
   otherwise build a fresh one.  */

tree
get_decl_list (value)
     tree value;
{
  tree list = NULL_TREE;

  if (TREE_CODE (value) == IDENTIFIER_NODE)
    {
      list = IDENTIFIER_AS_LIST (value);
      /* Reject a cached list that is not really `(... . VALUE)'.  */
      if (list != NULL_TREE
	  && (TREE_CODE (list) != TREE_LIST
	      || TREE_VALUE (list) != value))
	list = NULL_TREE;
      else if (IDENTIFIER_HAS_TYPE_VALUE (value)
	       && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE)
	{
	  /* Identifier names a class: cache one permanent list on the
	     class type itself and share it.  */
	  tree type = IDENTIFIER_TYPE_VALUE (value);
	  if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
	    CLASSTYPE_ID_AS_LIST (type) = perm_tree_cons (NULL_TREE, value, NULL_TREE);
	  list = CLASSTYPE_ID_AS_LIST (type);
	}
    }
  else if (TREE_CODE (value) == RECORD_TYPE
	   && TYPE_LANG_SPECIFIC (value))
    list = CLASSTYPE_AS_LIST (value);

  if (list != NULL_TREE)
    {
      /* Cached lists are expected to be singletons.  */
      my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
      return list;
    }

  /* No cached list applies; build a fresh one.  */
  return build_decl_list (NULL_TREE, value);
}
956 | ||
/* Return a canonical list `(NULL_TREE . VALUE)' for VALUE.

   First try to reuse a list cached on VALUE itself (an identifier's
   IDENTIFIER_AS_LIST, or a class's CLASSTYPE_ID_AS_LIST /
   CLASSTYPE_AS_LIST).  Failing that, search the list hash table for
   an isomorphic list; as a last resort build a fresh list on the
   class obstack and record it there for future calls.  (Despite the
   old comment, this never returns 0 — it always conses.)  */

tree
list_hash_lookup_or_cons (value)
     tree value;
{
  register int hashcode = TYPE_HASH (value);
  register struct list_hash *h;
  struct obstack *ambient_obstack;
  tree list = NULL_TREE;

  if (TREE_CODE (value) == IDENTIFIER_NODE)
    {
      list = IDENTIFIER_AS_LIST (value);
      /* Only reuse the cached list if it really is `(... . VALUE)'.  */
      if (list != NULL_TREE
	  && (TREE_CODE (list) != TREE_LIST
	      || TREE_VALUE (list) != value))
	list = NULL_TREE;
      else if (IDENTIFIER_HAS_TYPE_VALUE (value)
	       && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE)
	{
	  /* If the type name and constructor name are different, don't
	     write constructor name into type.  */
	  if (identifier_typedecl_value (value)
	      && identifier_typedecl_value (value) != constructor_name (value))
	    list = tree_cons (NULL_TREE, value, NULL_TREE);
	  else
	    {
	      /* Share one permanent list per class, cached on the type.  */
	      tree type = IDENTIFIER_TYPE_VALUE (value);
	      if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
		CLASSTYPE_ID_AS_LIST (type) = perm_tree_cons (NULL_TREE, value,
							      NULL_TREE);
	      list = CLASSTYPE_ID_AS_LIST (type);
	    }
	}
    }
  else if (TREE_CODE (value) == TYPE_DECL
	   && TREE_CODE (TREE_TYPE (value)) == RECORD_TYPE
	   && TYPE_LANG_SPECIFIC (TREE_TYPE (value)))
    list = CLASSTYPE_ID_AS_LIST (TREE_TYPE (value));
  else if (TREE_CODE (value) == RECORD_TYPE
	   && TYPE_LANG_SPECIFIC (value))
    list = CLASSTYPE_AS_LIST (value);

  if (list != NULL_TREE)
    {
      /* Cached lists are expected to be singletons.  */
      my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 302);
      return list;
    }

  if (debug_no_list_hash)
    return hash_tree_chain (value, NULL_TREE);

  /* Search the hash table for an equivalent singleton list: same
     value, no purpose, and no access bits set.  */
  for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
    if (h->hashcode == hashcode
	&& TREE_VIA_VIRTUAL (h->list) == 0
	&& TREE_VIA_PUBLIC (h->list) == 0
	&& TREE_VIA_PROTECTED (h->list) == 0
	&& TREE_PURPOSE (h->list) == 0
	&& TREE_VALUE (h->list) == value)
      {
	my_friendly_assert (TREE_TYPE (h->list) == 0, 303);
	my_friendly_assert (TREE_CHAIN (h->list) == 0, 304);
	return h->list;
      }

  /* Not found: build the list on the class obstack and record it.  */
  ambient_obstack = current_obstack;
  current_obstack = &class_obstack;
  list = build_tree_list (NULL_TREE, value);
  list_hash_add (hashcode, list);
  current_obstack = ambient_obstack;
  return list;
}
1032 | \f | |
/* Build an association between TYPE and some parameters:

   OFFSET is the offset added to `this' to convert it to a pointer
   of type `TYPE *'

   VTABLE is the virtual function table with which to initialize
   sub-objects of type TYPE.

   VIRTUALS are the virtual functions sitting in VTABLE.

   CHAIN are more associations we must retain.  */

tree
make_binfo (offset, type, vtable, virtuals, chain)
     tree offset, type;
     tree vtable, virtuals;
     tree chain;
{
  tree binfo = make_tree_vec (6);
  tree old_binfo = TYPE_BINFO (type);
  /* NOTE(review): `last' is assigned below but never used.  */
  tree last;

  TREE_CHAIN (binfo) = chain;
  /* Propagate the used bit from the rest of the chain.  */
  if (chain)
    TREE_USED (binfo) = TREE_USED (chain);

  TREE_TYPE (binfo) = TYPE_MAIN_VARIANT (type);
  BINFO_OFFSET (binfo) = offset;
  BINFO_VTABLE (binfo) = vtable;
  BINFO_VIRTUALS (binfo) = virtuals;
  BINFO_VPTR_FIELD (binfo) = NULL_TREE;

  last = binfo;
  /* If TYPE already has a binfo with base types, copy its basetype
     vector and the per-base access bits into the new binfo.  */
  if (old_binfo != NULL_TREE
      && BINFO_BASETYPES (old_binfo) != NULL_TREE)
    {
      int i, n_baseclasses = CLASSTYPE_N_BASECLASSES (type);
      tree binfos = TYPE_BINFO_BASETYPES (type);

      BINFO_BASETYPES (binfo) = make_tree_vec (n_baseclasses);
      for (i = 0; i < n_baseclasses; i++)
	{
	  tree base_binfo = TREE_VEC_ELT (binfos, i);
	  /* NOTE(review): old_binfo is known non-null in this block,
	     so this conditional (and the `if (old_binfo)' below)
	     can never take its null arm.  */
	  tree old_base_binfo = old_binfo ? BINFO_BASETYPE (old_binfo, i) : 0;
	  BINFO_BASETYPE (binfo, i) = base_binfo;
	  if (old_binfo)
	    {
	      TREE_VIA_PUBLIC (base_binfo) = TREE_VIA_PUBLIC (old_base_binfo);
	      TREE_VIA_PROTECTED (base_binfo) = TREE_VIA_PROTECTED (old_base_binfo);
	      TREE_VIA_VIRTUAL (base_binfo) = TREE_VIA_VIRTUAL (old_base_binfo);
	    }
	}
    }
  return binfo;
}
1088 | ||
1089 | tree | |
1090 | copy_binfo (list) | |
1091 | tree list; | |
1092 | { | |
1093 | tree binfo = copy_list (list); | |
1094 | tree rval = binfo; | |
1095 | while (binfo) | |
1096 | { | |
1097 | TREE_USED (binfo) = 0; | |
1098 | if (BINFO_BASETYPES (binfo)) | |
1099 | BINFO_BASETYPES (binfo) = copy_node (BINFO_BASETYPES (binfo)); | |
1100 | binfo = TREE_CHAIN (binfo); | |
1101 | } | |
1102 | return rval; | |
1103 | } | |
1104 | ||
1105 | /* Return the binfo value for ELEM in TYPE. */ | |
1106 | ||
1107 | tree | |
1108 | binfo_value (elem, type) | |
1109 | tree elem; | |
1110 | tree type; | |
1111 | { | |
1112 | if (get_base_distance (elem, type, 0, (tree *)0) == -2) | |
1113 | compiler_error ("base class `%s' ambiguous in binfo_value", | |
1114 | TYPE_NAME_STRING (elem)); | |
1115 | if (elem == type) | |
1116 | return TYPE_BINFO (type); | |
1117 | return get_binfo (elem, type, 0); | |
1118 | } | |
1119 | ||
1120 | tree | |
1121 | reverse_path (path) | |
1122 | tree path; | |
1123 | { | |
1124 | register tree prev = 0, tmp, next; | |
1125 | for (tmp = path; tmp; tmp = next) | |
1126 | { | |
1127 | next = BINFO_INHERITANCE_CHAIN (tmp); | |
1128 | BINFO_INHERITANCE_CHAIN (tmp) = prev; | |
1129 | prev = tmp; | |
1130 | } | |
1131 | return prev; | |
1132 | } | |
1133 | ||
/* Return the binfo on the chain LIST whose type is ELEM, either
   directly or, failing that, found among the base classes of the
   binfos on LIST.  Aborts if ELEM is found at two different
   offsets; returns 0 if ELEM is not found at all.  */

tree
virtual_member (elem, list)
     tree elem;
     tree list;
{
  tree t;
  tree rval, nval;

  /* First look for ELEM directly on LIST.  */
  for (t = list; t; t = TREE_CHAIN (t))
    if (elem == BINFO_TYPE (t))
      return t;
  rval = 0;
  /* Otherwise search each binfo's base classes.  */
  for (t = list; t; t = TREE_CHAIN (t))
    {
      tree binfos = BINFO_BASETYPES (t);
      int i;

      if (binfos != NULL_TREE)
	for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
	  {
	    nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
	    if (nval)
	      {
		/* NOTE(review): offsets are compared by node address,
		   not by value — presumably relies on shared
		   INTEGER_CST nodes; confirm.  */
		if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
		  my_friendly_abort (104);
		rval = nval;
	      }
	  }
    }
  return rval;
}
1165 | ||
/* Return the offset (as an INTEGER_CST) for ELEM in LIST.
   INITIAL_OFFSET is the value to add to the offset that ELEM's
   binfo entry in LIST provides.

   Returns NULL if ELEM does not have an binfo value in LIST.  */

tree
virtual_offset (elem, list, initial_offset)
     tree elem;
     tree list;
     tree initial_offset;
{
  tree vb, offset;
  tree rval, nval;

  /* ELEM directly on LIST: its own offset plus INITIAL_OFFSET.  */
  for (vb = list; vb; vb = TREE_CHAIN (vb))
    if (elem == BINFO_TYPE (vb))
      return size_binop (PLUS_EXPR, initial_offset, BINFO_OFFSET (vb));
  rval = 0;
  /* Otherwise search the base classes of each binfo on LIST.  */
  for (vb = list; vb; vb = TREE_CHAIN (vb))
    {
      tree binfos = BINFO_BASETYPES (vb);
      int i;

      if (binfos == NULL_TREE)
	continue;

      for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
	{
	  nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
	  if (nval)
	    {
	      /* NOTE(review): offsets compared by node address, not
		 value; presumably relies on shared INTEGER_CSTs.  */
	      if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
		my_friendly_abort (105);
	      /* OFFSET is set whenever RVAL is, so its use below is
		 never uninitialized.  */
	      offset = BINFO_OFFSET (vb);
	      rval = nval;
	    }
	}
    }
  if (rval == NULL_TREE)
    return rval;
  /* NOTE(review): unlike the direct-hit path above, this path adds
     the containing binfo's offset but ignores INITIAL_OFFSET —
     confirm that is intended.  */
  return size_binop (PLUS_EXPR, offset, BINFO_OFFSET (rval));
}
1209 | ||
/* Dump a description of binfo ELEM to stderr: its type and offset,
   its vtable decl (if any), and each virtual function entry together
   with its DECL_VINDEX.  Debugging aid.  */

void
debug_binfo (elem)
     tree elem;
{
  int i;
  tree virtuals;

  fprintf (stderr, "type \"%s\"; offset = %d\n",
	   TYPE_NAME_STRING (BINFO_TYPE (elem)),
	   TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
  fprintf (stderr, "vtable type:\n");
  debug_tree (BINFO_TYPE (elem));
  if (BINFO_VTABLE (elem))
    fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
  else
    fprintf (stderr, "no vtable decl yet\n");
  fprintf (stderr, "virtuals:\n");
  virtuals = BINFO_VIRTUALS (elem);
  /* Skip the first entry (and a second one when -fdossier is on);
     presumably these are bookkeeping slots, not real virtual
     functions — confirm against the vtable layout code.  */
  if (virtuals != 0)
    {
      virtuals = TREE_CHAIN (virtuals);
      if (flag_dossier)
	virtuals = TREE_CHAIN (virtuals);
    }
  /* Print each remaining entry as "name [slot =? vindex]".  */
  i = 1;
  while (virtuals)
    {
      tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
      fprintf (stderr, "%s [%d =? %d]\n",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
	       i, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
      virtuals = TREE_CHAIN (virtuals);
      i += 1;
    }
}
1245 | ||
1246 | /* Return the length of a chain of nodes chained through DECL_CHAIN. | |
1247 | We expect a null pointer to mark the end of the chain. | |
1248 | This is the Lisp primitive `length'. */ | |
1249 | ||
1250 | int | |
1251 | decl_list_length (t) | |
1252 | tree t; | |
1253 | { | |
1254 | register tree tail; | |
1255 | register int len = 0; | |
1256 | ||
1257 | my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL, 300); | |
1258 | for (tail = t; tail; tail = DECL_CHAIN (tail)) | |
1259 | len++; | |
1260 | ||
1261 | return len; | |
1262 | } | |
1263 | ||
1264 | tree | |
1265 | fnaddr_from_vtable_entry (entry) | |
1266 | tree entry; | |
1267 | { | |
1268 | return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry)))); | |
1269 | } | |
1270 | ||
1271 | void | |
1272 | set_fnaddr_from_vtable_entry (entry, value) | |
1273 | tree entry, value; | |
1274 | { | |
1275 | TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry)))) = value; | |
1276 | } | |
1277 | ||
1278 | tree | |
1279 | function_arg_chain (t) | |
1280 | tree t; | |
1281 | { | |
1282 | return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t))); | |
1283 | } | |
1284 | ||
1285 | int | |
1286 | promotes_to_aggr_type (t, code) | |
1287 | tree t; | |
1288 | enum tree_code code; | |
1289 | { | |
1290 | if (TREE_CODE (t) == code) | |
1291 | t = TREE_TYPE (t); | |
1292 | return IS_AGGR_TYPE (t); | |
1293 | } | |
1294 | ||
1295 | int | |
1296 | is_aggr_type_2 (t1, t2) | |
1297 | tree t1, t2; | |
1298 | { | |
1299 | if (TREE_CODE (t1) != TREE_CODE (t2)) | |
1300 | return 0; | |
1301 | return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2); | |
1302 | } | |
1303 | ||
1304 | /* Give message using types TYPE1 and TYPE2 as arguments. | |
1305 | PFN is the function which will print the message; | |
1306 | S is the format string for PFN to use. */ | |
1307 | void | |
1308 | message_2_types (pfn, s, type1, type2) | |
1309 | void (*pfn) (); | |
1310 | char *s; | |
1311 | tree type1, type2; | |
1312 | { | |
1313 | tree name1 = TYPE_NAME (type1); | |
1314 | tree name2 = TYPE_NAME (type2); | |
1315 | if (TREE_CODE (name1) == TYPE_DECL) | |
1316 | name1 = DECL_NAME (name1); | |
1317 | if (TREE_CODE (name2) == TYPE_DECL) | |
1318 | name2 = DECL_NAME (name2); | |
1319 | (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2)); | |
1320 | } | |
1321 | \f | |
#define PRINT_RING_SIZE 4

/* Return a printable name for DECL.

   For FUNCTION_DECLs that carry C++ language-specific info, the full
   signature is pretty-printed into one of PRINT_RING_SIZE malloc'd
   buffers kept in a ring, so that many recent results stay valid at
   once.  Other decls simply yield their IDENTIFIER string (or
   "((anonymous))" when nameless).  Callers must not free the result.  */
char *
lang_printable_name (decl)
     tree decl;
{
  static tree decl_ring[PRINT_RING_SIZE];
  static char *print_ring[PRINT_RING_SIZE];
  static int ring_counter;
  int i;

  /* Non-functions, and functions with no C++ info: plain name.  */
  if (TREE_CODE (decl) != FUNCTION_DECL
      || DECL_LANG_SPECIFIC (decl) == 0)
    {
      if (DECL_NAME (decl))
	{
	  if (THIS_NAME_P (DECL_NAME (decl)))
	    return "this";
	  return IDENTIFIER_POINTER (DECL_NAME (decl));
	}
      return "((anonymous))";
    }

  /* See if this print name is lying around.  */
  for (i = 0; i < PRINT_RING_SIZE; i++)
    if (decl_ring[i] == decl)
      /* yes, so return it.  */
      return print_ring[i];

  /* Advance to the next ring slot (wrapping)...  */
  if (++ring_counter == PRINT_RING_SIZE)
    ring_counter = 0;

  /* ...but never evict the cached name of the current function; abort
     if the only remaining slot holds it.  */
  if (current_function_decl != NULL_TREE)
    {
      if (decl_ring[ring_counter] == current_function_decl)
	ring_counter += 1;
      if (ring_counter == PRINT_RING_SIZE)
	ring_counter = 0;
      if (decl_ring[ring_counter] == current_function_decl)
	my_friendly_abort (106);
    }

  /* Free the buffer being evicted from the chosen slot.  */
  if (print_ring[ring_counter])
    free (print_ring[ring_counter]);

  {
    /* Constructors and destructors are printed without a return type.  */
    int print_ret_type_p
      = (!DECL_CONSTRUCTOR_P (decl)
	 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));

    char *name = (char *)fndecl_as_string (0, decl, print_ret_type_p);
    print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
    strcpy (print_ring[ring_counter], name);
    decl_ring[ring_counter] = decl;
  }
  return print_ring[ring_counter];
}
1379 | \f | |
1380 | /* Comparison function for sorting identifiers in RAISES lists. | |
1381 | Note that because IDENTIFIER_NODEs are unique, we can sort | |
1382 | them by address, saving an indirection. */ | |
1383 | static int | |
1384 | id_cmp (p1, p2) | |
1385 | tree *p1, *p2; | |
1386 | { | |
1387 | return (HOST_WIDE_INT)TREE_VALUE (*p1) - (HOST_WIDE_INT)TREE_VALUE (*p2); | |
1388 | } | |
1389 | ||
/* Build the FUNCTION_TYPE or METHOD_TYPE which may raise exceptions
   listed in RAISES.  CTYPE is the class scope (or NULL_TREE), and
   TYPE the function type being qualified.  An existing variant with
   the same qualifiers and the same exception list is reused when
   possible.  */
tree
build_exception_variant (ctype, type, raises)
     tree ctype, type;
     tree raises;
{
  int i;
  tree v = TYPE_MAIN_VARIANT (type);
  tree t, t2, cname;
  tree *a = (tree *)alloca ((list_length (raises)+1) * sizeof (tree));
  int constp = TYPE_READONLY (type);
  int volatilep = TYPE_VOLATILE (type);

  if (raises && TREE_CHAIN (raises))
    {
      /* Two or more exceptions: sort the nodes (by IDENTIFIER
	 address, see id_cmp) so equivalent RAISES lists compare
	 element by element below.  */
      for (i = 0, t = raises; t; t = TREE_CHAIN (t), i++)
	a[i] = t;
      /* NULL terminator for list.  */
      a[i] = NULL_TREE;
      qsort (a, i, sizeof (tree), id_cmp);
      /* Re-chain the nodes in sorted order; the terminator ends it.  */
      while (i--)
	TREE_CHAIN (a[i]) = a[i+1];
      raises = a[0];
    }
  else if (raises)
    /* do nothing.  */;
  else
    /* No exceptions: an ordinary type variant suffices.  */
    return build_type_variant (v, constp, volatilep);

  /* Strip CTYPE's name down to a bare identifier, if present.  */
  if (ctype)
    {
      cname = TYPE_NAME (ctype);
      if (TREE_CODE (cname) == TYPE_DECL)
	cname = DECL_NAME (cname);
    }
  else
    cname = NULL_TREE;

  for (t = raises; t; t = TREE_CHAIN (t))
    {
      /* See that all the exceptions we are thinking about
	 raising have been declared.  */
      tree this_cname = lookup_exception_cname (ctype, cname, t);
      tree decl = lookup_exception_object (this_cname, TREE_VALUE (t), 1);

      if (decl == NULL_TREE)
	decl = lookup_exception_object (this_cname, TREE_VALUE (t), 0);
      /* Place canonical exception decl into TREE_TYPE of RAISES list.  */
      TREE_TYPE (t) = decl;
    }

  /* Search existing variants for one with identical qualifiers and an
     identical (sorted) list of exception decls.  */
  for (v = TYPE_NEXT_VARIANT (v); v; v = TYPE_NEXT_VARIANT (v))
    {
      if (TYPE_READONLY (v) != constp
	  || TYPE_VOLATILE (v) != volatilep)
	continue;

      t = raises;
      t2 = TYPE_RAISES_EXCEPTIONS (v);
      while (t && t2)
	{
	  if (TREE_TYPE (t) == TREE_TYPE (t2))
	    {
	      t = TREE_CHAIN (t);
	      t2 = TREE_CHAIN (t2);
	    }
	  else break;
	}
      /* Any leftover on either side means the lists differ.  */
      if (t || t2)
	continue;
      /* List of exceptions raised matches previously found list.

	 @@ Nice to free up storage used in consing up the
	 @@ list of exceptions raised.  */
      return v;
    }

  /* Need to build a new variant.  */
  v = copy_node (type);
  TYPE_NEXT_VARIANT (v) = TYPE_NEXT_VARIANT (type);
  TYPE_NEXT_VARIANT (type) = v;
  /* Make sure the RAISES list outlives temporary allocation.  */
  if (raises && ! TREE_PERMANENT (raises))
    {
      push_obstacks_nochange ();
      end_temporary_allocation ();
      raises = copy_list (raises);
      pop_obstacks ();
    }
  TYPE_RAISES_EXCEPTIONS (v) = raises;
  return v;
}
1482 | ||
/* Subroutine of copy_to_permanent

   Assuming T is a node build bottom-up, make it all exist on
   permanent obstack, if it is not permanent already.  Relies on the
   caller having already switched current_obstack to the permanent
   obstack, so every copy_node here allocates permanently.  */
static tree
make_deep_copy (t)
     tree t;
{
  enum tree_code code;

  /* Permanent (or empty) subtrees can be shared as-is.  */
  if (t == NULL_TREE || TREE_PERMANENT (t))
    return t;

  switch (code = TREE_CODE (t))
    {
    case ERROR_MARK:
      return error_mark_node;

      /* These decl codes break out of the switch and reach the
	 abort below — apparently a deliberate "cannot copy"
	 trap.  NOTE(review): confirm this is intended rather than a
	 missing `return t'.  */
    case VAR_DECL:
    case FUNCTION_DECL:
    case CONST_DECL:
      break;

    case PARM_DECL:
      {
	tree chain = TREE_CHAIN (t);
	t = copy_node (t);
	TREE_CHAIN (t) = make_deep_copy (chain);
	TREE_TYPE (t) = make_deep_copy (TREE_TYPE (t));
	DECL_INITIAL (t) = make_deep_copy (DECL_INITIAL (t));
	DECL_SIZE (t) = make_deep_copy (DECL_SIZE (t));
	return t;
      }

    case TREE_LIST:
      {
	tree chain = TREE_CHAIN (t);
	t = copy_node (t);
	TREE_PURPOSE (t) = make_deep_copy (TREE_PURPOSE (t));
	TREE_VALUE (t) = make_deep_copy (TREE_VALUE (t));
	TREE_CHAIN (t) = make_deep_copy (chain);
	return t;
      }

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (t);

	t = copy_node (t);
	while (len--)
	  TREE_VEC_ELT (t, len) = make_deep_copy (TREE_VEC_ELT (t, len));
	return t;
      }

      /* Constants: a shallow copy suffices.  */
    case INTEGER_CST:
    case REAL_CST:
    case STRING_CST:
      return copy_node (t);

      /* Three-operand expressions.  */
    case COND_EXPR:
    case TARGET_EXPR:
    case NEW_EXPR:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
      TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
      TREE_OPERAND (t, 2) = make_deep_copy (TREE_OPERAND (t, 2));
      return t;

    case SAVE_EXPR:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
      return t;

      /* Two-operand expressions.  */
    case MODIFY_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_ANDTC_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case COMPOUND_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case CALL_EXPR:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
      TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
      return t;

      /* One-operand expressions.  */
    case CONVERT_EXPR:
    case ADDR_EXPR:
    case INDIRECT_REF:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case NOP_EXPR:
    case COMPONENT_REF:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
      return t;

      /* This list is incomplete, but should suffice for now.
	 It is very important that `sorry' does not call
	 `report_error_function'.  That could cause an infinite loop.  */
    default:
      sorry ("initializer contains unrecognized tree code");
      return error_mark_node;

    }
  /* Reached only via the decl cases that `break' above.  */
  my_friendly_abort (107);
  /* NOTREACHED */
  return NULL_TREE;
}
1619 | ||
1620 | /* Assuming T is a node built bottom-up, make it all exist on | |
1621 | permanent obstack, if it is not permanent already. */ | |
1622 | tree | |
1623 | copy_to_permanent (t) | |
1624 | tree t; | |
1625 | { | |
1626 | register struct obstack *ambient_obstack = current_obstack; | |
1627 | register struct obstack *ambient_saveable_obstack = saveable_obstack; | |
1628 | ||
1629 | if (t == NULL_TREE || TREE_PERMANENT (t)) | |
1630 | return t; | |
1631 | ||
1632 | saveable_obstack = &permanent_obstack; | |
1633 | current_obstack = saveable_obstack; | |
1634 | ||
1635 | t = make_deep_copy (t); | |
1636 | ||
1637 | current_obstack = ambient_obstack; | |
1638 | saveable_obstack = ambient_saveable_obstack; | |
1639 | ||
1640 | return t; | |
1641 | } | |
1642 | ||
/* Print allocation statistics for the C++-specific obstacks, plus
   the search and class statistics, for compiler debugging.  */
void
print_lang_statistics ()
{
  extern struct obstack maybepermanent_obstack;
  print_obstack_statistics ("class_obstack", &class_obstack);
  print_obstack_statistics ("permanent_obstack", &permanent_obstack);
  print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
  print_search_statistics ();
  print_class_statistics ();
}
1653 | ||
/* This is used by the `assert' macro.  It is provided in libgcc.a,
   which `cc' doesn't know how to link.  Note that the C++ front-end
   no longer actually uses the `assert' macro (instead, it calls
   my_friendly_assert).  But all of the back-end files still need this.  */
void
__eprintf (string, expression, line, filename)
#ifdef __STDC__
     const char *string;
     const char *expression;
     unsigned line;
     const char *filename;
#else
     char *string;
     char *expression;
     unsigned line;
     char *filename;
#endif
{
  /* STRING is the caller-supplied printf format; EXPRESSION, LINE and
     FILENAME fill its conversion specs.  Flush so the message is
     visible before the abort.  */
  fprintf (stderr, string, expression, line, filename);
  fflush (stderr);
  abort ();
}
1676 | ||
1677 | /* Return, as an INTEGER_CST node, the number of elements for | |
1678 | TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */ | |
1679 | ||
1680 | tree | |
1681 | array_type_nelts_top (type) | |
1682 | tree type; | |
1683 | { | |
1684 | return fold (build (PLUS_EXPR, integer_type_node, | |
1685 | array_type_nelts (type), | |
1686 | integer_one_node)); | |
1687 | } | |
1688 | ||
1689 | /* Return, as an INTEGER_CST node, the number of elements for | |
1690 | TYPE (which is an ARRAY_TYPE). This one is a recursive count of all | |
1691 | ARRAY_TYPEs that are clumped together. */ | |
1692 | ||
1693 | tree | |
1694 | array_type_nelts_total (type) | |
1695 | tree type; | |
1696 | { | |
1697 | tree sz = array_type_nelts_top (type); | |
1698 | type = TREE_TYPE (type); | |
1699 | while (TREE_CODE (type) == ARRAY_TYPE) | |
1700 | { | |
1701 | tree n = array_type_nelts_top (type); | |
1702 | sz = fold (build (MULT_EXPR, integer_type_node, sz, n)); | |
1703 | type = TREE_TYPE (type); | |
1704 | } | |
1705 | return sz; | |
1706 | } |