Commit | Line | Data |
---|---|---|
9bf86ebb PR |
1 | /* Breadth-first and depth-first routines for |
2 | searching multiple-inheritance lattice for GNU C++. | |
3 | Copyright (C) 1987, 1989, 1992, 1993 Free Software Foundation, Inc. | |
4 | Contributed by Michael Tiemann (tiemann@cygnus.com) | |
5 | ||
6 | This file is part of GNU CC. | |
7 | ||
8 | GNU CC is free software; you can redistribute it and/or modify | |
9 | it under the terms of the GNU General Public License as published by | |
10 | the Free Software Foundation; either version 2, or (at your option) | |
11 | any later version. | |
12 | ||
13 | GNU CC is distributed in the hope that it will be useful, | |
14 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
15 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
16 | GNU General Public License for more details. | |
17 | ||
18 | You should have received a copy of the GNU General Public License | |
19 | along with GNU CC; see the file COPYING. If not, write to | |
20 | the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ | |
21 | ||
22 | #if 0 | |
23 | /* Remove before release, should only appear for development and testing. */ | |
24 | #define CHECK_convert_pointer_to_single_level | |
25 | #endif | |
26 | ||
27 | /* High-level class interface. */ | |
28 | ||
#include "config.h"
#include "tree.h"
#include <stdio.h>
#include <string.h>
#include "cp-tree.h"
#include "obstack.h"
#include "flags.h"
35 | ||
36 | #define obstack_chunk_alloc xmalloc | |
37 | #define obstack_chunk_free free | |
38 | ||
39 | void init_search (); | |
40 | extern struct obstack *current_obstack; | |
41 | ||
42 | #include "stack.h" | |
43 | ||
44 | /* Obstack used for remembering decision points of breadth-first. */ | |
45 | static struct obstack search_obstack; | |
46 | ||
47 | /* Obstack used to bridge from one function context to another. */ | |
48 | static struct obstack bridge_obstack; | |
49 | ||
50 | /* Methods for pushing and popping objects to and from obstacks. */ | |
51 | ||
52 | struct stack_level * | |
53 | push_stack_level (obstack, tp, size) | |
54 | struct obstack *obstack; | |
55 | char *tp; /* Sony NewsOS 5.0 compiler doesn't like void * here. */ | |
56 | int size; | |
57 | { | |
58 | struct stack_level *stack; | |
59 | /* FIXME. Doesn't obstack_grow, in the case when the current chunk has | |
60 | insufficient space, move the base so that obstack_next_free is not | |
61 | valid? Perhaps obstack_copy should be used rather than obstack_grow, | |
62 | and its returned value be used. -- Raeburn | |
63 | */ | |
64 | stack = (struct stack_level *) obstack_next_free (obstack); | |
65 | obstack_grow (obstack, tp, size); | |
66 | obstack_finish (obstack); | |
67 | stack->obstack = obstack; | |
68 | stack->first = (tree *) obstack_base (obstack); | |
69 | stack->limit = obstack_room (obstack) / sizeof (tree *); | |
70 | return stack; | |
71 | } | |
72 | ||
73 | struct stack_level * | |
74 | pop_stack_level (stack) | |
75 | struct stack_level *stack; | |
76 | { | |
77 | struct stack_level *tem = stack; | |
78 | struct obstack *obstack = tem->obstack; | |
79 | stack = tem->prev; | |
80 | obstack_free (obstack, tem); | |
81 | return stack; | |
82 | } | |
83 | ||
84 | #define search_level stack_level | |
85 | static struct search_level *search_stack; | |
86 | ||
87 | static tree lookup_field_1 (); | |
88 | static int lookup_fnfields_1 (); | |
89 | static void dfs_walk (); | |
90 | static int markedp (); | |
91 | static void dfs_unmark (); | |
92 | static void dfs_init_vbase_pointers (); | |
93 | ||
94 | static tree vbase_types; | |
95 | static tree vbase_decl, vbase_decl_ptr; | |
96 | static tree vbase_decl_ptr_intermediate; | |
97 | static tree vbase_init_result; | |
98 | ||
99 | /* Allocate a level of searching. */ | |
100 | static struct search_level * | |
101 | push_search_level (stack, obstack) | |
102 | struct stack_level *stack; | |
103 | struct obstack *obstack; | |
104 | { | |
105 | struct search_level tem; | |
106 | tem.prev = stack; | |
107 | ||
108 | return push_stack_level (obstack, (char *) &tem, sizeof (tem)); | |
109 | } | |
110 | ||
111 | /* Discard a level of search allocation. */ | |
112 | #define pop_search_level pop_stack_level | |
113 | \f | |
114 | /* Search memoization. */ | |
/* Search memoization.  One of these is a level of the memoization
   context stack kept in type_obstack.  */
struct type_level
{
  /* Must be first: a type_level is pushed/popped through the generic
     stack_level routines (see push_type_level / pop_type_level).  */
  struct stack_level base;

  /* First object allocated in obstack of entries.  */
  char *entries;

  /* Number of types memoized in this context.  */
  int len;

  /* Type being memoized; save this if we are saving
     memoized contexts.  */
  tree type;
};
129 | ||
130 | /* Obstack used for memoizing member and member function lookup. */ | |
131 | ||
132 | static struct obstack type_obstack, type_obstack_entries; | |
133 | static struct type_level *type_stack; | |
134 | static tree _vptr_name; | |
135 | ||
136 | /* Make things that look like tree nodes, but allocate them | |
137 | on type_obstack_entries. */ | |
138 | static int my_tree_node_counter; | |
139 | static tree my_tree_cons (), my_build_string (); | |
140 | ||
141 | extern int flag_memoize_lookups, flag_save_memoized_contexts; | |
142 | ||
143 | /* Variables for gathering statistics. */ | |
144 | static int my_memoized_entry_counter; | |
145 | static int memoized_fast_finds[2], memoized_adds[2], memoized_fast_rejects[2]; | |
146 | static int memoized_fields_searched[2]; | |
147 | static int n_fields_searched; | |
148 | static int n_calls_lookup_field, n_calls_lookup_field_1; | |
149 | static int n_calls_lookup_fnfields, n_calls_lookup_fnfields_1; | |
150 | static int n_calls_get_base_type; | |
151 | static int n_outer_fields_searched; | |
152 | static int n_contexts_saved; | |
153 | ||
154 | /* Local variables to help save memoization contexts. */ | |
155 | static tree prev_type_memoized; | |
156 | static struct type_level *prev_type_stack; | |
157 | ||
158 | /* Allocate a level of type memoization context. */ | |
159 | static struct type_level * | |
160 | push_type_level (stack, obstack) | |
161 | struct stack_level *stack; | |
162 | struct obstack *obstack; | |
163 | { | |
164 | struct type_level tem; | |
165 | ||
166 | tem.base.prev = stack; | |
167 | ||
168 | obstack_finish (&type_obstack_entries); | |
169 | tem.entries = (char *) obstack_base (&type_obstack_entries); | |
170 | tem.len = 0; | |
171 | tem.type = NULL_TREE; | |
172 | ||
173 | return (struct type_level *)push_stack_level (obstack, (char *) &tem, | |
174 | sizeof (tem)); | |
175 | } | |
176 | ||
177 | /* Discard a level of type memoization context. */ | |
178 | ||
179 | static struct type_level * | |
180 | pop_type_level (stack) | |
181 | struct type_level *stack; | |
182 | { | |
183 | obstack_free (&type_obstack_entries, stack->entries); | |
184 | return (struct type_level *)pop_stack_level ((struct stack_level *)stack); | |
185 | } | |
186 | ||
187 | /* Make something that looks like a TREE_LIST, but | |
188 | do it on the type_obstack_entries obstack. */ | |
/* Make something that looks like a TREE_LIST, but
   do it on the type_obstack_entries obstack.  */
static tree
my_tree_cons (purpose, value, chain)
     tree purpose, value, chain;
{
  tree p = (tree)obstack_alloc (&type_obstack_entries, sizeof (struct tree_list));
  ++my_tree_node_counter;
  TREE_TYPE (p) = NULL_TREE;
  /* Clear the node's fourth word before setting the code -- presumably
     the flags word that TREE_SET_CODE and friends expect to start out
     zeroed.  Layout assumption; verify against struct tree_common.  */
  ((HOST_WIDE_INT *)p)[3] = 0;
  TREE_SET_CODE (p, TREE_LIST);
  TREE_PURPOSE (p) = purpose;
  TREE_VALUE (p) = value;
  TREE_CHAIN (p) = chain;
  return p;
}
203 | ||
/* Make something that looks like a STRING_CST for STR, but allocate
   it on the type_obstack_entries obstack (cf. my_tree_cons).  STR is
   referenced, not copied.  */
static tree
my_build_string (str)
     char *str;
{
  tree p = (tree)obstack_alloc (&type_obstack_entries, sizeof (struct tree_string));
  ++my_tree_node_counter;
  TREE_TYPE (p) = 0;
  /* NOTE(review): my_tree_cons clears this word through HOST_WIDE_INT *,
     but here the cast is int *; on hosts where the two types differ in
     size these clear different amounts -- confirm which is intended.  */
  ((int *)p)[3] = 0;
  TREE_SET_CODE (p, STRING_CST);
  TREE_STRING_POINTER (p) = str;
  TREE_STRING_LENGTH (p) = strlen (str);
  return p;
}
217 | \f | |
/* Memoizing machinery to make searches for multiple inheritance
   reasonably efficient.  */
#define MEMOIZE_HASHSIZE 8
typedef struct memoized_entry
{
  /* The enclosing (outer) context's entry for the same type; the
     type's pointer is restored to this when a context is popped.  */
  struct memoized_entry *chain;
  /* Unique id, drawn from my_memoized_entry_counter.  */
  int uid;
  /* Hash buckets of memoized lookups, keyed on member name: one
     array for data members, one for member functions.  */
  tree data_members[MEMOIZE_HASHSIZE];
  tree function_members[MEMOIZE_HASHSIZE];
} *ME;

/* Accessors; ENTRY is stored untyped (as char *) in
   CLASSTYPE_MTABLE_ENTRY, hence the (ME) casts.  */
#define MEMOIZED_CHAIN(ENTRY) (((ME)ENTRY)->chain)
#define MEMOIZED_UID(ENTRY) (((ME)ENTRY)->uid)
#define MEMOIZED_FIELDS(ENTRY,INDEX) (((ME)ENTRY)->data_members[INDEX])
#define MEMOIZED_FNFIELDS(ENTRY,INDEX) (((ME)ENTRY)->function_members[INDEX])
/* The following is probably a lousy hash function.  */
#define MEMOIZED_HASH_FN(NODE) (((long)(NODE)>>4)&(MEMOIZE_HASHSIZE - 1))
235 | ||
236 | static struct memoized_entry * | |
237 | my_new_memoized_entry (chain) | |
238 | struct memoized_entry *chain; | |
239 | { | |
240 | struct memoized_entry *p = | |
241 | (struct memoized_entry *)obstack_alloc (&type_obstack_entries, | |
242 | sizeof (struct memoized_entry)); | |
243 | bzero (p, sizeof (struct memoized_entry)); | |
244 | MEMOIZED_CHAIN (p) = chain; | |
245 | MEMOIZED_UID (p) = ++my_memoized_entry_counter; | |
246 | return p; | |
247 | } | |
248 | ||
/* Make an entry in the memoized table for type TYPE recording a
   lookup of member name NAME.  FUNCTION_P nonzero means the lookup is
   for member functions rather than data members.  Returns the new
   TREE_LIST entry; its TREE_VALUE is filled in later by the caller,
   and its TREE_TYPE starts as error_mark_node (see below).  */

tree
make_memoized_table_entry (type, name, function_p)
     tree type, name;
     int function_p;
{
  int index = MEMOIZED_HASH_FN (name);
  tree entry, *prev_entry;

  memoized_adds[function_p] += 1;
  /* First memoization for TYPE in this context: record the (type,
     entry-slot) pair in type_obstack and give TYPE a fresh table.  */
  if (CLASSTYPE_MTABLE_ENTRY (type) == 0)
    {
      obstack_ptr_grow (&type_obstack, type);
      obstack_blank (&type_obstack, sizeof (struct memoized_entry *));
      CLASSTYPE_MTABLE_ENTRY (type) = (char *)my_new_memoized_entry ((struct memoized_entry *)0);
      type_stack->len++;
      /* Each memoized type takes two slots in the level's array.  */
      if (type_stack->len * 2 >= type_stack->base.limit)
	my_friendly_abort (88);
    }
  if (function_p)
    prev_entry = &MEMOIZED_FNFIELDS (CLASSTYPE_MTABLE_ENTRY (type), index);
  else
    prev_entry = &MEMOIZED_FIELDS (CLASSTYPE_MTABLE_ENTRY (type), index);

  /* Push the new entry onto the front of the hash bucket's chain.  */
  entry = my_tree_cons (name, NULL_TREE, *prev_entry);
  *prev_entry = entry;

  /* Don't know the error message to give yet.  */
  TREE_TYPE (entry) = error_mark_node;

  return entry;
}
283 | ||
284 | /* When a new function or class context is entered, we build | |
285 | a table of types which have been searched for members. | |
286 | The table is an array (obstack) of types. When a type is | |
287 | entered into the obstack, its CLASSTYPE_MTABLE_ENTRY | |
288 | field is set to point to a new record, of type struct memoized_entry. | |
289 | ||
290 | A non-NULL TREE_TYPE of the entry contains a visibility error message. | |
291 | ||
292 | The slots for the data members are arrays of tree nodes. | |
293 | These tree nodes are lists, with the TREE_PURPOSE | |
294 | of this list the known member name, and the TREE_VALUE | |
295 | as the FIELD_DECL for the member. | |
296 | ||
297 | For member functions, the TREE_PURPOSE is again the | |
298 | name of the member functions for that class, | |
299 | and the TREE_VALUE of the list is a pair | |
300 | whose TREE_PURPOSE is a member function of this name, | |
301 | and whose TREE_VALUE is a list of known argument lists this | |
302 | member function has been called with. The TREE_TYPE of the pair, | |
303 | if non-NULL, is an error message to print. */ | |
304 | ||
/* Tell search machinery that we are entering a new context, and
   to update tables appropriately.

   TYPE is the type of the context we are entering, which can
   be NULL_TREE if we are not in a class's scope.

   USE_OLD, if nonzero tries to use previous context.  */
void
push_memoized_context (type, use_old)
     tree type;
     int use_old;
{
  int len;
  tree *tem;

  if (prev_type_stack)
    {
      if (use_old && prev_type_memoized == type)
	{
#ifdef GATHER_STATISTICS
	  n_contexts_saved++;
#endif
	  /* Re-enter the saved context wholesale.  */
	  type_stack = prev_type_stack;
	  prev_type_stack = 0;

	  /* The level's data is a flat array of (type, saved-entry)
	     pairs; reinstall each type's memoized-entry pointer from
	     the snapshot pop_memoized_context took.  */
	  tem = &type_stack->base.first[0];
	  len = type_stack->len;
	  while (len--)
	    CLASSTYPE_MTABLE_ENTRY (tem[len*2]) = (char *)tem[len*2+1];
	  return;
	}
      /* Otherwise, need to pop old stack here.  */
      type_stack = pop_type_level (prev_type_stack);
      prev_type_memoized = 0;
      prev_type_stack = 0;
    }

  type_stack = push_type_level ((struct stack_level *)type_stack,
				&type_obstack);
  type_stack->type = type;
}
346 | ||
/* Tell search machinery that we have left a context.
   We do not currently save these contexts for later use.
   If we wanted to, we could not use pop_search_level, since
   popping that level allows the data we have collected to
   be clobbered; a stack of obstacks would be needed.

   USE_OLD nonzero (and flag_save_memoized_contexts set) keeps the
   level around so push_memoized_context can re-enter it later.  */
void
pop_memoized_context (use_old)
     int use_old;
{
  int len;
  /* Flat array of (type, entry-slot) pairs for this level.  */
  tree *tem = &type_stack->base.first[0];

  if (! flag_save_memoized_contexts)
    use_old = 0;
  else if (use_old)
    {
      /* Snapshot each type's current memoized-entry pointer into the
	 pair array, for push_memoized_context to reinstall.  */
      len = type_stack->len;
      while (len--)
	tem[len*2+1] = (tree)CLASSTYPE_MTABLE_ENTRY (tem[len*2]);

      prev_type_stack = type_stack;
      prev_type_memoized = type_stack->type;
    }

  if (flag_memoize_lookups)
    {
      /* Unhook this context's entries: restore each type's pointer to
	 the entry chained beneath it (the enclosing context's).  */
      len = type_stack->len;
      while (len--)
	CLASSTYPE_MTABLE_ENTRY (tem[len*2])
	  = (char *)MEMOIZED_CHAIN (CLASSTYPE_MTABLE_ENTRY (tem[len*2]));
    }
  if (! use_old)
    type_stack = pop_type_level (type_stack);
  else
    /* Keep the level alive (it is now prev_type_stack); just step
       the stack pointer below it.  */
    type_stack = (struct type_level *)type_stack->base.prev;
}
383 | \f | |
384 | /* This can go away when the new searching strategy has a little mileage on it. */ | |
385 | #define NEW_SEARCH 1 | |
386 | #if NEW_SEARCH | |
387 | /* This is the newer recursive depth first one, the old one follows. */ | |
/* Depth-first worker for get_binfo: search BINFO's inheritance
   lattice for PARENT.

   IS_PRIVATE is nonzero if BINFO was reached along a private path.
   RVAL is the best binfo found so far (NULL_TREE initially); the
   result is returned, with *RVAL_PRIVATE_PTR recording whether it
   was found along a private path.  XTYPE is the most derived type
   the search started from, FRIENDS its friend-class list, and
   PROTECT the caller's access-checking mode (see get_binfo).

   Visited base binfos are marked with SET_BINFO_MARKED; the caller
   unmarks the lattice afterwards (via dfs_walk).  Returns
   error_mark_node on an ambiguity when PROTECT is set.  */
static tree
get_binfo_recursive (binfo, is_private, parent, rval, rval_private_ptr, xtype,
		     friends, protect)
     tree binfo, parent, rval, xtype, friends;
     int *rval_private_ptr, protect, is_private;
{
  tree binfos;
  int i, n_baselinks;

  if (BINFO_TYPE (binfo) == parent)
    {
      if (rval == NULL_TREE)
	{
	  /* First time PARENT has been reached; record it.  */
	  rval = binfo;
	  *rval_private_ptr = is_private;
	}
      else
	{
	  /* I believe it is the case that this error is only an error
	     when used by someone that wants error messages printed.
	     Routines that call this one, that don't set protect want
	     the first one found, even if there are more.  */
	  if (protect)
	    {
	      /* Found two or more possible return values.  */
	      error_with_aggr_type (parent, "type `%s' is ambiguous base class for type `%s'",
				    TYPE_NAME_STRING (xtype));
	      rval = error_mark_node;
	    }
	}
      return rval;
    }

  binfos = BINFO_BASETYPES (binfo);
  n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  /* Process base types.  */
  for (i = 0; i < n_baselinks; i++)
    {
      tree base_binfo = TREE_VEC_ELT (binfos, i);

      if (BINFO_MARKED (base_binfo) == 0)
	{
	  /* Privateness propagates down the path.  */
	  int via_private = is_private || !TREE_VIA_PUBLIC (base_binfo);

	  SET_BINFO_MARKED (base_binfo);

	  if (via_private == 0)
	    ;
	  else if (protect == 0)
	    via_private = 0;
	  else if (protect == 1 && BINFO_TYPE (binfo) == current_class_type)
	    /* The immediate base class of the class we are in
	       does let its public members through.  */
	    via_private = 0;
#ifndef NOJJG
	  else if (protect
		   && friends != NULL_TREE
		   && BINFO_TYPE (binfo) == xtype
		   && value_member (current_class_type, friends))
	    /* Friend types of the most derived type have access
	       to its baseclass pointers.  */
	    via_private = 0;
#endif

	  rval = get_binfo_recursive (base_binfo, via_private, parent, rval,
				      rval_private_ptr, xtype, friends,
				      protect);
	  if (rval == error_mark_node)
	    return rval;
	}
    }

  return rval;
}
463 | ||
464 | /* Check whether the type given in BINFO is derived from PARENT. If | |
465 | it isn't, return 0. If it is, but the derivation is MI-ambiguous | |
466 | AND protect != 0, emit an error message and return error_mark_node. | |
467 | ||
468 | Otherwise, if TYPE is derived from PARENT, return the actual base | |
469 | information, unless one of the protection violations below | |
470 | occurs, in which case emit an error message and return error_mark_node. | |
471 | ||
472 | The below should be worded better. It may not be exactly what the code | |
473 | does, but there should be a loose correlation. If you understand the code | |
474 | well, please try and make the comments below more readable. | |
475 | ||
476 | If PROTECT is 1, then check if access to a public field of PARENT | |
477 | would be private. | |
478 | ||
479 | If PROTECT is 2, then check if the given type is derived from | |
480 | PARENT via private visibility rules. | |
481 | ||
482 | If PROTECT is 3, then immediately private baseclass is ok, | |
483 | but deeper than that, check if private. */ | |
484 | tree | |
485 | get_binfo (parent, binfo, protect) | |
486 | register tree parent, binfo; | |
487 | int protect; | |
488 | { | |
489 | tree xtype, type; | |
490 | tree otype; | |
491 | int head = 0, tail = 0; | |
492 | int is_private = 0; | |
493 | tree rval = NULL_TREE; | |
494 | int rval_private = 0; | |
495 | tree friends; | |
496 | ||
497 | #ifdef GATHER_STATISTICS | |
498 | n_calls_get_base_type++; | |
499 | #endif | |
500 | ||
501 | if (TREE_CODE (parent) == TREE_VEC) | |
502 | parent = BINFO_TYPE (parent); | |
503 | /* unions cannot participate in inheritance relationships */ | |
504 | else if (TREE_CODE (parent) == UNION_TYPE) | |
505 | return NULL_TREE; | |
506 | else if (TREE_CODE (parent) != RECORD_TYPE) | |
507 | my_friendly_abort (89); | |
508 | ||
509 | parent = TYPE_MAIN_VARIANT (parent); | |
510 | ||
511 | if (TREE_CODE (binfo) == TREE_VEC) | |
512 | type = BINFO_TYPE (binfo); | |
513 | else if (TREE_CODE (binfo) == RECORD_TYPE) | |
514 | { | |
515 | type = binfo; | |
516 | binfo = TYPE_BINFO (type); | |
517 | } | |
518 | else my_friendly_abort (90); | |
519 | xtype = type; | |
520 | friends = current_class_type ? CLASSTYPE_FRIEND_CLASSES (type) : NULL_TREE; | |
521 | ||
522 | rval = get_binfo_recursive (binfo, is_private, parent, rval, &rval_private, | |
523 | xtype, friends, protect); | |
524 | ||
525 | dfs_walk (binfo, dfs_unmark, markedp); | |
526 | ||
527 | if (rval && protect && rval_private) | |
528 | { | |
529 | if (protect == 3) | |
530 | { | |
531 | tree binfos = BINFO_BASETYPES (TYPE_BINFO (xtype)); | |
532 | int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0; | |
533 | ||
534 | for (i = 0; i < n_baselinks; i++) | |
535 | { | |
536 | tree base_binfo = TREE_VEC_ELT (binfos, i); | |
537 | if (parent == BINFO_TYPE (base_binfo)) | |
538 | /* It's ok, since it's immediate. */ | |
539 | return rval; | |
540 | } | |
541 | } | |
542 | error_with_aggr_type (xtype, "type `%s' is derived from private `%s'", | |
543 | TYPE_NAME_STRING (parent)); | |
544 | return error_mark_node; | |
545 | } | |
546 | ||
547 | return rval; | |
548 | } | |
549 | #else | |
550 | /* Check whether the type given in BINFO is derived from PARENT. If | |
551 | it isn't, return 0. If it is, but the derivation is MI-ambiguous | |
552 | AND protect != 0, emit an error message and return error_mark_node. | |
553 | ||
554 | Otherwise, if TYPE is derived from PARENT, return the actual base | |
555 | information, unless one of the protection violations below | |
556 | occurs, in which case emit an error message and return error_mark_node. | |
557 | ||
558 | The below should be worded better. It may not be exactly what the code | |
559 | does, but there should be a loose correlation. If you understand the code | |
560 | well, please try and make the comments below more readable. | |
561 | ||
562 | If PROTECT is 1, then check if access to a public field of PARENT | |
563 | would be private. | |
564 | ||
565 | If PROTECT is 2, then check if the given type is derived from | |
566 | PARENT via private visibility rules. | |
567 | ||
568 | If PROTECT is 3, then immediately private baseclass is ok, | |
569 | but deeper than that, check if private. */ | |
/* Old breadth-first implementation of get_binfo, compiled only when
   NEW_SEARCH is 0 (currently disabled; the recursive version above is
   used instead).  Maintains an explicit queue of (binfo, via_private)
   pairs on search_obstack.  */
tree
get_binfo (parent, binfo, protect)
     register tree parent, binfo;
     int protect;
{
  tree xtype, type;
  tree otype;
  int head = 0, tail = 0;
  int is_private = 0;
  tree rval = NULL_TREE;
  int rval_private = 0;
  tree friends;

#ifdef GATHER_STATISTICS
  n_calls_get_base_type++;
#endif

  if (TREE_CODE (parent) == TREE_VEC)
    parent = BINFO_TYPE (parent);
  /* unions cannot participate in inheritance relationships */
  else if (TREE_CODE (parent) == UNION_TYPE)
    return NULL_TREE;
  else if (TREE_CODE (parent) != RECORD_TYPE)
    my_friendly_abort (89);

  parent = TYPE_MAIN_VARIANT (parent);
  search_stack = push_search_level (search_stack, &search_obstack);

  if (TREE_CODE (binfo) == TREE_VEC)
    type = BINFO_TYPE (binfo);
  else if (TREE_CODE (binfo) == RECORD_TYPE)
    {
      type = binfo;
      binfo = TYPE_BINFO (type);
    }
  else my_friendly_abort (90);
  xtype = type;
  friends = current_class_type ? CLASSTYPE_FRIEND_CLASSES (type) : NULL_TREE;

  while (1)
    {
      tree binfos = BINFO_BASETYPES (binfo);
      int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

      /* Process and/or queue base types.  */
      for (i = 0; i < n_baselinks; i++)
	{
	  tree base_binfo = TREE_VEC_ELT (binfos, i);

	  if (BINFO_MARKED (base_binfo) == 0)
	    {
	      int via_private = is_private || !TREE_VIA_PUBLIC (base_binfo);

	      SET_BINFO_MARKED (base_binfo);

	      if (via_private == 0)
		;
	      else if (protect == 0)
		via_private = 0;
	      else if (protect == 1 && BINFO_TYPE (binfo) == current_class_type)
		/* The immediate base class of the class we are in
		   does let its public members through.  */
		via_private = 0;
#ifndef NOJJG
	      else if (protect
		       && friends != NULL_TREE
		       && BINFO_TYPE (binfo) == xtype
		       && value_member (current_class_type, friends))
		/* Friend types of the most derived type have access
		   to its baseclass pointers.  */
		via_private = 0;
#endif

	      /* Enqueue the pair; via_private rides in the pointer
		 queue as a smuggled integer.  */
	      otype = type;
	      obstack_ptr_grow (&search_obstack, base_binfo);
	      obstack_ptr_grow (&search_obstack, (void *) via_private);
	      tail += 2;
	      if (tail >= search_stack->limit)
		my_friendly_abort (91);
	    }
#if 0
	  /* This code cannot possibly be right.  Ambiguities can only be
	     checked by traversing the whole tree, and seeing if it pops
	     up twice.  */
	  else if (protect && ! TREE_VIA_VIRTUAL (base_binfo))
	    {
	      error_with_aggr_type (parent, "type `%s' is ambiguous base class for type `%s'",
				    TYPE_NAME_STRING (xtype));
	      error ("(base class for types `%s' and `%s')",
		     TYPE_NAME_STRING (BINFO_TYPE (binfo)),
		     TYPE_NAME_STRING (otype));
	      rval = error_mark_node;
	      goto cleanup;
	    }
#endif
	}

    dont_queue:
      /* Process head of queue, if one exists.  */
      if (head >= tail)
	break;

      binfo = search_stack->first[head++];
      is_private = (int) search_stack->first[head++];
      if (BINFO_TYPE (binfo) == parent)
	{
	  if (rval == 0)
	    {
	      rval = binfo;
	      rval_private = is_private;
	    }
	  else
	    /* I believe it is the case that this error is only an error when
	       used by someone that wants error messages printed.  Routines that
	       call this one, that don't set protect want the first one found,
	       even if there are more.  */
	    if (protect)
	      {
		/* Found two or more possible return values.  */
		error_with_aggr_type (parent, "type `%s' is ambiguous base class for type `%s'",
				      TYPE_NAME_STRING (xtype));
		rval = error_mark_node;
		goto cleanup;
	      }
	  goto dont_queue;
	}
    }

 cleanup:
  {
    /* Unmark every binfo still in the queue (they sit at even
       indices; odd indices hold the via_private flags).  */
    tree *tp = search_stack->first;
    tree *search_tail = tp + tail;

    while (tp < search_tail)
      {
	CLEAR_BINFO_MARKED (*tp);
	tp += 2;
      }
  }
  search_stack = pop_search_level (search_stack);

  if (rval == error_mark_node)
    return error_mark_node;

  if (rval && protect && rval_private)
    {
      if (protect == 3)
	{
	  tree binfos = BINFO_BASETYPES (TYPE_BINFO (xtype));
	  int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

	  for (i = 0; i < n_baselinks; i++)
	    {
	      tree base_binfo = TREE_VEC_ELT (binfos, i);
	      if (parent == BINFO_TYPE (base_binfo))
		/* It's ok, since it's immediate.  */
		return rval;
	    }
	}
      error_with_aggr_type (xtype, "type `%s' is derived from private `%s'",
			    TYPE_NAME_STRING (parent));
      return error_mark_node;
    }

  return rval;
}
736 | #endif | |
737 | ||
738 | #if NEW_SEARCH | |
739 | /* This is the newer depth first get_base_distance, the older one follows. */ | |
740 | static | |
741 | get_base_distance_recursive (binfo, depth, is_private, basetype_path, rval, | |
742 | rval_private_ptr, new_binfo_ptr, parent, path_ptr, | |
743 | protect, via_virtual_ptr, via_virtual) | |
744 | tree binfo, basetype_path, *new_binfo_ptr, parent, *path_ptr; | |
745 | int *rval_private_ptr, depth, is_private, rval, protect, *via_virtual_ptr, | |
746 | via_virtual; | |
747 | { | |
748 | tree binfos; | |
749 | int i, n_baselinks; | |
750 | ||
751 | if (BINFO_TYPE (binfo) == parent) | |
752 | { | |
753 | if (rval == -1) | |
754 | { | |
755 | rval = depth; | |
756 | *rval_private_ptr = is_private; | |
757 | *new_binfo_ptr = binfo; | |
758 | *via_virtual_ptr = via_virtual; | |
759 | } | |
760 | else | |
761 | { | |
762 | int same_object = tree_int_cst_equal (BINFO_OFFSET (*new_binfo_ptr), | |
763 | BINFO_OFFSET (binfo)); | |
764 | ||
765 | if (*via_virtual_ptr && via_virtual==0) | |
766 | { | |
767 | *rval_private_ptr = is_private; | |
768 | *new_binfo_ptr = binfo; | |
769 | *via_virtual_ptr = via_virtual; | |
770 | } | |
771 | else if (same_object) | |
772 | { | |
773 | /* Note, this should probably succeed to find, and | |
774 | override the old one if the old one was private and | |
775 | this one isn't. */ | |
776 | return rval; | |
777 | } | |
778 | ||
779 | rval = -2; | |
780 | } | |
781 | return rval; | |
782 | } | |
783 | ||
784 | binfos = BINFO_BASETYPES (binfo); | |
785 | n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0; | |
786 | depth += 1; | |
787 | ||
788 | /* Process base types. */ | |
789 | for (i = 0; i < n_baselinks; i++) | |
790 | { | |
791 | tree base_binfo = TREE_VEC_ELT (binfos, i); | |
792 | ||
793 | if (BINFO_MARKED (base_binfo) == 0) | |
794 | { | |
795 | int via_private = is_private || !TREE_VIA_PUBLIC (base_binfo); | |
796 | int was; | |
797 | ||
798 | /* When searching for a non-virtual, we cannot mark | |
799 | virtually found binfos. */ | |
800 | if (!via_virtual) | |
801 | SET_BINFO_MARKED (base_binfo); | |
802 | ||
803 | if (via_private == 0) | |
804 | ; | |
805 | else if (protect == 0) | |
806 | via_private = 0; | |
807 | ||
808 | #define WATCH_VALUES(rval, via_private) (rval == -1 ? 3 : via_private) | |
809 | ||
810 | was = WATCH_VALUES (rval, *via_virtual_ptr); | |
811 | rval = get_base_distance_recursive (base_binfo, depth, via_private, | |
812 | binfo, rval, rval_private_ptr, | |
813 | new_binfo_ptr, parent, path_ptr, | |
814 | protect, via_virtual_ptr, | |
815 | TREE_VIA_VIRTUAL (base_binfo)|via_virtual); | |
816 | /* watch for updates, only update, if path is good. */ | |
817 | if (path_ptr && WATCH_VALUES (rval, *via_virtual_ptr) != was) | |
818 | BINFO_INHERITANCE_CHAIN (base_binfo) = binfo; | |
819 | if (rval == -2 && *via_virtual_ptr == 0) | |
820 | return rval; | |
821 | ||
822 | #undef WATCH_VALUES | |
823 | ||
824 | } | |
825 | } | |
826 | ||
827 | return rval; | |
828 | } | |
829 | ||
830 | /* Return the number of levels between type PARENT and the type given | |
831 | in BINFO, following the leftmost path to PARENT not found along a | |
832 | virtual path, if there are no real PARENTs (all come from virtual | |
833 | base classes), then follow the leftmost path to PARENT. | |
834 | ||
835 | Return -1 if TYPE is not derived from PARENT. | |
836 | Return -2 if PARENT is an ambiguous base class of TYPE. | |
837 | Return -3 if PARENT is private to TYPE, and protect is non-zero. | |
838 | ||
839 | If PATH_PTR is non-NULL, then also build the list of types | |
840 | from PARENT to TYPE, with TREE_VIA_VIRTUAL and TREE_VIA_PUBLIC | |
841 | set. | |
842 | ||
843 | It is unclear whether or not the path should be built if -2 and/or | |
844 | -3 is returned. Maybe, maybe not. I suspect that there is code | |
845 | that relies upon it being built, such as prepare_fresh_vtable. | |
846 | (mrs) | |
847 | ||
848 | Also, it would appear that we only sometimes want -2. The question is | |
849 | under what exact conditions do we want to see -2, and when do we not | |
850 | want to see -2. (mrs) | |
851 | ||
852 | It is also unlikely that this thing finds all ambiguities, as I | |
853 | don't trust any deviation from the method used in get_binfo. It | |
854 | would be nice to use that method here, as it is simple and straight | |
855 | forward. The code here and in recursive_bounded_basetype_p is not. | |
856 | For now, I shall include an extra call to find ambiguities. (mrs) | |
857 | */ | |
858 | ||
int
get_base_distance (parent, binfo, protect, path_ptr)
     register tree parent, binfo;
     int protect;
     tree *path_ptr;
{
  /* NOTE(review): head, tail and depth appear unused in this variant of
     the function (they belong to the obstack-queue variant in the other
     preprocessor branch) -- confirm before removing.  */
  int head, tail;
  int is_private = 0;
  int rval = -1;
  int depth = 0;
  int rval_private = 0;
  tree type, basetype_path;
  tree friends;
  int use_leftmost;
  tree new_binfo;
  int via_virtual;

  /* Strip cv-qualification so comparisons below are against the
     main variant of PARENT.  */
  if (TYPE_READONLY (parent) || TYPE_VOLATILE (parent))
    parent = TYPE_MAIN_VARIANT (parent);
  use_leftmost = (parent == TYPE_MAIN_VARIANT (parent));

  /* Accept either a binfo (TREE_VEC) or a RECORD_TYPE for BINFO;
     normalize to a binfo plus its underlying type.  */
  if (TREE_CODE (binfo) == TREE_VEC)
    type = BINFO_TYPE (binfo);
  else if (TREE_CODE (binfo) == RECORD_TYPE)
    {
      type = binfo;
      binfo = TYPE_BINFO (type);
    }
  else my_friendly_abort (92);

  /* NOTE(review): FRIENDS is computed but not obviously used below in
     this variant -- presumably consumed by get_base_distance_recursive
     in an earlier revision; confirm.  */
  friends = current_class_type ? CLASSTYPE_FRIEND_CLASSES (type) : NULL_TREE;

  if (path_ptr)
    {
      basetype_path = TYPE_BINFO (type);
      BINFO_INHERITANCE_CHAIN (basetype_path) = NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (parent) == type)
    {
      /* If the distance is 0, then we don't really need
	 a path pointer, but we shouldn't let garbage go back.  */
      if (path_ptr)
	*path_ptr = basetype_path;
      return 0;
    }

  /* Delegate the real search; NEW_BINFO receives the binfo where
     PARENT was found, RVAL_PRIVATE whether the path was private.  */
  rval = get_base_distance_recursive (binfo, 0, 0, NULL_TREE, rval,
				      &rval_private, &new_binfo, parent,
				      path_ptr, protect, &via_virtual, 0);

  if (path_ptr)
    BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;

  basetype_path = binfo;

  /* Clear the marks the recursive walk left on the lattice.  */
  dfs_walk (binfo, dfs_unmark, markedp);

  binfo = new_binfo;

  /* Visibilities don't count if we found an ambiguous basetype.  */
  if (rval == -2)
    rval_private = 0;

  if (rval && protect && rval_private)
    return -3;

  if (path_ptr)
    *path_ptr = binfo;
  return rval;
}
930 | #else | |
931 | /* Recursively search for a path from PARENT to BINFO. | |
932 | If RVAL is > 0 and we succeed, update the BINFO_INHERITANCE_CHAIN | |
933 | pointers. | |
934 | If we find a distinct basetype that's not the one from BINFO, | |
935 | return -2; | |
936 | If we don't find any path, return 0. | |
937 | ||
938 | If we encounter a virtual basetype on the path, return RVAL | |
939 | and don't change any pointers after that point. */ | |
static int
recursive_bounded_basetype_p (parent, binfo, rval, update_chain)
     tree parent, binfo;
     int rval;
     int update_chain;
{
  tree binfos;

  /* Same type: the paths converge.  If the sub-object offsets also
     agree this is the PARENT we are bounding toward; a differing
     offset means a distinct copy, i.e. an ambiguity (-2).  */
  if (BINFO_TYPE (parent) == BINFO_TYPE (binfo))
    {
      if (tree_int_cst_equal (BINFO_OFFSET (parent), BINFO_OFFSET (binfo)))
	return rval;
      return -2;
    }

  /* Past a virtual base, stop updating BINFO_INHERITANCE_CHAIN
     pointers (see the function comment above).  */
  if (TREE_VIA_VIRTUAL (binfo))
    update_chain = 0;

  /* Intentional assignment-in-condition: BINFOS is both set and
     tested here.  */
  if (binfos = BINFO_BASETYPES (binfo))
    {
      int i, nval;
      for (i = 0; i < TREE_VEC_LENGTH (binfos); i++)
	{
	  nval = recursive_bounded_basetype_p (parent, TREE_VEC_ELT (binfos, i),
					       rval, update_chain);
	  if (nval < 0)
	    return nval;
	  if (nval > 0 && update_chain)
	    BINFO_INHERITANCE_CHAIN (TREE_VEC_ELT (binfos, i)) = binfo;
	}
      /* NOTE(review): any node with basetypes returns RVAL here even
	 when no child actually reached PARENT; only leaves return the
	 documented "not found" 0.  This looks inconsistent with the
	 function comment -- confirm whether callers depend on it (the
	 (mrs) comments elsewhere suggest the routine is distrusted).  */
      return rval;
    }
  return 0;
}
974 | ||
975 | /* -------------------------------------------------- */ | |
976 | /* These two routines are ONLY here to check for ambiguities for | |
977 | get_base_distance, as it probably cannot check by itself for | |
978 | all ambiguities. When get_base_distance is sure to check for all, | |
979 | these routines can go. (mrs) */ | |
980 | ||
/* Depth-first search below BINFO for a binfo whose type is PARENT.
   Returns the unique such binfo, NULL_TREE if none is found, or
   error_mark_node if two distinct occurrences are seen (ambiguity).
   Marks each visited base binfo; the caller (get_binfo2) must clear
   the marks afterwards with dfs_walk.  TYPE is unused here beyond
   being threaded through the recursion.  */
static tree
get_binfo2_recursive (binfo, parent, type)
     register tree binfo, parent;
     tree type;
{
  tree rval = NULL_TREE;
  tree nrval;
  tree binfos = BINFO_BASETYPES (binfo);
  int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  if (BINFO_TYPE (binfo) == parent)
    {
      return binfo;
    }

  /* Process base types.  */
  for (i = 0; i < n_baselinks; i++)
    {
      tree base_binfo = TREE_VEC_ELT (binfos, i);

      /* Skip already-visited binfos (shared virtual bases are only
	 walked once this way).  */
      if (BINFO_MARKED (base_binfo) == 0)
	{
	  SET_BINFO_MARKED (base_binfo);

	  nrval = get_binfo2_recursive (base_binfo, parent, type);

	  /* Propagate an ambiguity immediately.  */
	  if (nrval == error_mark_node)
	    return nrval;
	  if (nrval)
	    if (rval == 0)
	      {
		rval = nrval;
	      }
	    else
	      /* Second distinct find: PARENT is ambiguous.  */
	      return error_mark_node;
	}
    }
  return rval;
}
1020 | ||
1021 | static tree | |
1022 | get_binfo2 (parent, binfo) | |
1023 | register tree parent, binfo; | |
1024 | { | |
1025 | tree type; | |
1026 | tree rval = NULL_TREE; | |
1027 | ||
1028 | if (TREE_CODE (parent) == TREE_VEC) | |
1029 | parent = BINFO_TYPE (parent); | |
1030 | /* unions cannot participate in inheritance relationships */ | |
1031 | else if (TREE_CODE (parent) == UNION_TYPE) | |
1032 | return 0; | |
1033 | else if (TREE_CODE (parent) != RECORD_TYPE) | |
1034 | my_friendly_abort (89); | |
1035 | ||
1036 | parent = TYPE_MAIN_VARIANT (parent); | |
1037 | ||
1038 | if (TREE_CODE (binfo) == TREE_VEC) | |
1039 | type = BINFO_TYPE (binfo); | |
1040 | else if (TREE_CODE (binfo) == RECORD_TYPE) | |
1041 | { | |
1042 | type = binfo; | |
1043 | binfo = TYPE_BINFO (type); | |
1044 | } | |
1045 | else my_friendly_abort (90); | |
1046 | ||
1047 | rval = get_binfo2_recursive (binfo, parent, type); | |
1048 | ||
1049 | dfs_walk (binfo, dfs_unmark, markedp); | |
1050 | ||
1051 | return rval; | |
1052 | } | |
1053 | ||
1054 | /* -------------------------------------------------- */ | |
1055 | ||
1056 | /* Return the number of levels between type PARENT and the type given | |
1057 | in BINFO, following the leftmost path to PARENT. If PARENT is its | |
1058 | own main type variant, then if PARENT appears in different places | |
1059 | from TYPE's point of view, the leftmost PARENT will be the one | |
1060 | chosen. | |
1061 | ||
1062 | Return -1 if TYPE is not derived from PARENT. | |
1063 | Return -2 if PARENT is an ambiguous base class of TYPE. | |
1064 | Return -3 if PARENT is private to TYPE, and protect is non-zero. | |
1065 | ||
1066 | If PATH_PTR is non-NULL, then also build the list of types | |
   from PARENT to TYPE, with TREE_VIA_VIRTUAL and TREE_VIA_PUBLIC
1068 | set. | |
1069 | ||
1070 | It is unclear whether or not the path should be built if -2 and/or | |
1071 | -3 is returned. Maybe, maybe not. I suspect that there is code | |
1072 | that relies upon it being built, such as prepare_fresh_vtable. | |
1073 | (mrs) | |
1074 | ||
1075 | Also, it would appear that we only sometimes want -2. The question is | |
1076 | under what exact conditions do we want to see -2, and when do we not | |
1077 | want to see -2. (mrs) | |
1078 | ||
   It is also unlikely that this thing finds all ambiguities, as I
1080 | don't trust any deviation from the method used in get_binfo. It | |
1081 | would be nice to use that method here, as it is simple and straight | |
1082 | forward. The code here and in recursive_bounded_basetype_p is not. | |
1083 | For now, I shall include an extra call to find ambiguities. (mrs) | |
1084 | */ | |
1085 | ||
int
get_base_distance (parent, binfo, protect, path_ptr)
     register tree parent, binfo;
     int protect;
     tree *path_ptr;
{
  /* HEAD/TAIL index the breadth-first queue of tuples grown on
     search_obstack; each tuple is (binfo, depth, via_private
     [, basetype_path]) -- 3 or 4 pointers depending on PATH_PTR.  */
  int head, tail;
  int is_private = 0;
  int rval = -1;
  int depth = 0;
  int rval_private = 0;
  tree type, basetype_path;
  tree friends;
  int use_leftmost;

  /* Strip cv-qualification; leftmost-path selection only applies when
     PARENT is its own main variant.  */
  if (TYPE_READONLY (parent) || TYPE_VOLATILE (parent))
    parent = TYPE_MAIN_VARIANT (parent);
  use_leftmost = (parent == TYPE_MAIN_VARIANT (parent));

  /* Accept a binfo, a RECORD_TYPE, or (harmlessly) a UNION_TYPE.  */
  if (TREE_CODE (binfo) == TREE_VEC)
    type = BINFO_TYPE (binfo);
  else if (TREE_CODE (binfo) == RECORD_TYPE)
    {
      type = binfo;
      binfo = TYPE_BINFO (type);
    }
  else if (TREE_CODE (binfo) == UNION_TYPE)
    {
      /* UNION_TYPEs do not participate in inheritance relationships.  */
      return -1;
    }
  else my_friendly_abort (92);

  /* NOTE(review): FRIENDS is computed but not referenced again in this
     function -- presumably a leftover; confirm before removing.  */
  friends = current_class_type ? CLASSTYPE_FRIEND_CLASSES (type) : NULL_TREE;

  if (path_ptr)
    {
      basetype_path = TYPE_BINFO (type);
      BINFO_INHERITANCE_CHAIN (basetype_path) = NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (parent) == type)
    {
      /* If the distance is 0, then we don't really need
	 a path pointer, but we shouldn't let garbage go back.  */
      if (path_ptr)
	*path_ptr = basetype_path;
      return 0;
    }

  search_stack = push_search_level (search_stack, &search_obstack);

  /* Keep space for TYPE.  */
  obstack_ptr_grow (&search_obstack, binfo);
  obstack_ptr_grow (&search_obstack, NULL_PTR);
  obstack_ptr_grow (&search_obstack, NULL_PTR);
  if (path_ptr)
    {
      obstack_ptr_grow (&search_obstack, NULL_PTR);
      head = 4;
    }
  else head = 3;
  tail = head;

  /* Breadth-first search: expand the current binfo's bases onto the
     queue, then pop the next queued tuple, until PARENT is found or
     the queue drains.  */
  while (1)
    {
      tree binfos = BINFO_BASETYPES (binfo);
      int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

      /* Process and/or queue base types.  */
      for (i = 0; i < n_baselinks; i++)
	{
	  tree base_binfo = TREE_VEC_ELT (binfos, i);

	  if (BINFO_MARKED (base_binfo) == 0)
	    {
	      int via_private = is_private || !TREE_VIA_PUBLIC (base_binfo);

	      SET_BINFO_MARKED (base_binfo);

	      /* Privateness is only tracked when PROTECT is set.  */
	      if (via_private == 0)
		;
	      else if (protect == 0)
		via_private = 0;

	      obstack_ptr_grow (&search_obstack, base_binfo);
	      obstack_ptr_grow (&search_obstack, (HOST_WIDE_INT) depth);
	      obstack_ptr_grow (&search_obstack, (HOST_WIDE_INT) via_private);
	      tail += 3;
	      if (path_ptr)
		{
		  obstack_ptr_grow (&search_obstack, basetype_path);
		  tail += 1;
		}
	      if (tail >= search_stack->limit)
		my_friendly_abort (93);
	    }
#if 0
	  /* This code cannot possibly be right.  Ambiguities can only be
	     checked by traversing the whole tree, and seeing if it pops
	     up twice.  */
	  else if (! TREE_VIA_VIRTUAL (base_binfo))
	    {
	      rval = -2;
	      goto done;
	    }
#endif
	}

      /* Process head of queue, if one exists.  */
      if (head >= tail)
	break;

      binfo = search_stack->first[head++];
      depth = (int) search_stack->first[head++] + 1;
      is_private = (int) search_stack->first[head++];
      if (path_ptr)
	{
	  basetype_path = search_stack->first[head++];
	  BINFO_INHERITANCE_CHAIN (binfo) = basetype_path;
	  basetype_path = binfo;
	}
      if (BINFO_TYPE (binfo) == parent)
	{
	  /* It is wrong to set this and break, the proper thing to do
	     would be to set it only if it has not been set before,
	     and if it has been set, an ambiguity exists, and just
	     continue searching the tree for more of them as is done
	     in get_binfo.  But until the code below can cope, this
	     can't be done.  Also, it is not clear what should happen if
	     use_leftmost is set.  */
	  rval = depth;
	  rval_private = is_private;
	  break;
	}
    }
#if 0
  /* Unneeded now, as we know the above code in the #if 0 is wrong.  */
 done:
#endif
  {
    int increment = path_ptr ? 4 : 3;
    tree *tp = search_stack->first;
    tree *search_tail = tp + tail;

    /* We can skip the first entry, since it wasn't marked.  */
    tp += increment;

    basetype_path = binfo;
    /* Clear the BFS marks on everything that was queued.  */
    while (tp < search_tail)
      {
	CLEAR_BINFO_MARKED (*tp);
	tp += increment;
      }

    /* Now, guarantee that we are following the leftmost path in the
       chain.  Algorithm: the search stack holds tuples in BFS order.
       The last tuple on the search stack contains the tentative binfo
       for the basetype we are looking for.  We know that starting
       with FIRST, each tuple with only a single basetype must be on
       the leftmost path.  Each time we come to a split, we select
       the tuple for the leftmost basetype that can reach the ultimate
       basetype.  */

    if (use_leftmost
	&& rval > 0
	&& (! BINFO_OFFSET_ZEROP (binfo) || TREE_VIA_VIRTUAL (binfo)))
      {
	tree tp_binfos;

	/* Farm out the tuples with a single basetype.  */
	for (tp = search_stack->first; tp < search_tail; tp += increment)
	  {
	    tp_binfos = BINFO_BASETYPES (*tp);
	    if (tp_binfos && TREE_VEC_LENGTH (tp_binfos) > 1)
	      break;
	  }

	if (tp < search_tail)
	  {
	    /* Pick the best path.  */
	    tree base_binfo;
	    int i;
	    int nrval = rval;
	    for (i = 0; i < TREE_VEC_LENGTH (tp_binfos); i++)
	      {
		base_binfo = TREE_VEC_ELT (tp_binfos, i);
		if (tp+((i+1)*increment) < search_tail)
		  my_friendly_assert (base_binfo == tp[(i+1)*increment], 295);
		/* Intentional assignment-in-condition: stop at the
		   first base that can reach BINFO.  */
		if (nrval = recursive_bounded_basetype_p (binfo, base_binfo, rval, 1))
		  break;
	      }
	    rval = nrval;
	    if (rval > 0)
	      BINFO_INHERITANCE_CHAIN (base_binfo) = *tp;
	  }

	/* Because I don't trust recursive_bounded_basetype_p to find
	   all ambiguities, I will just make sure here.  When it is
	   sure that all ambiguities are found, the two routines and
	   this call can be removed.  Not totally sure this should be
	   here, but I think it should.  (mrs) */

	if (get_binfo2 (parent, type) == error_mark_node && rval != -2)
	  {
#if 1
	    /* This warning is here because the code over in
	       prepare_fresh_vtable relies on partial completion
	       offered by recursive_bounded_basetype_p I think, but
	       that behavior is not documented.  It needs to be.  I
	       don't think prepare_fresh_vtable is the only routine
	       that relies upon path_ptr being set to something in a
	       particular way when this routine returns -2.  (mrs) */
	    /* See PR 428 for a test case that can tickle this.  */
	    warning ("internal consistency check failed, please report, recovering.");
	    rval = -2;
#endif
	  }

	/* Visibilities don't count if we found an ambiguous basetype.  */
	if (rval == -2)
	  rval_private = 0;
      }
  }
  search_stack = pop_search_level (search_stack);

  if (rval && protect && rval_private)
    return -3;

  if (path_ptr)
    *path_ptr = binfo;
  return rval;
}
1319 | #endif | |
1320 | ||
1321 | /* Search for a member with name NAME in a multiple inheritance lattice | |
1322 | specified by TYPE. If it does not exist, return NULL_TREE. | |
1323 | If the member is ambiguously referenced, return `error_mark_node'. | |
1324 | Otherwise, return the FIELD_DECL. */ | |
1325 | ||
1326 | /* Do a 1-level search for NAME as a member of TYPE. The caller | |
1327 | must figure out whether it has a visible path to this field. | |
1328 | (Since it is only one level, this is reasonable.) */ | |
static tree
lookup_field_1 (type, name)
     tree type, name;
{
  register tree field = TYPE_FIELDS (type);

#ifdef GATHER_STATISTICS
  n_calls_lookup_field_1++;
#endif
  /* Linear scan of TYPE's own field list.  */
  while (field)
    {
#ifdef GATHER_STATISTICS
      n_fields_searched++;
#endif
      /* An unnamed field of union type is an anonymous union; search
	 its members transparently, as if they were members of TYPE.  */
      if (DECL_NAME (field) == NULL_TREE
	  && TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
	{
	  tree temp = lookup_field_1 (TREE_TYPE (field), name);
	  if (temp)
	    return temp;
	}
      if (DECL_NAME (field) == name)
	{
	  /* Record a cross-reference for named variables/constants.  */
	  if ((TREE_CODE(field) == VAR_DECL || TREE_CODE(field) == CONST_DECL)
	      && DECL_ASSEMBLER_NAME (field) != NULL)
	    GNU_xref_ref(current_function_decl,
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (field)));
	  return field;
	}
      field = TREE_CHAIN (field);
    }
  /* Not found.  */
  if (name == _vptr_name)
    {
      /* Give the user what s/he thinks s/he wants.  */
      if (TYPE_VIRTUAL_P (type))
	return CLASSTYPE_VFIELD (type);
    }
  return NULL_TREE;
}
1369 | ||
1370 | /* Compute the visibility of FIELD. This is done by computing | |
1371 | the visibility available to each type in BASETYPES (which comes | |
1372 | as a list of [via_public/basetype] in reverse order, namely base | |
1373 | class before derived class). The first one which defines a | |
1374 | visibility defines the visibility for the field. Otherwise, the | |
1375 | visibility of the field is that which occurs normally. | |
1376 | ||
1377 | Uses global variables CURRENT_CLASS_TYPE and | |
1378 | CURRENT_FUNCTION_DECL to use friend relationships | |
1379 | if necessary. | |
1380 | ||
1381 | This will be static when lookup_fnfield comes into this file. */ | |
1382 | ||
/* Each of these records the decision on FIELD's decl (for the fast-path
   cache checked below) and returns the corresponding visibility.  */
#define PUBLIC_RETURN return (DECL_PUBLIC (field) = 1), visibility_public
#define PROTECTED_RETURN return (DECL_PROTECTED (field) = 1), visibility_protected
#define PRIVATE_RETURN return (DECL_PRIVATE (field) = 1), visibility_private

enum visibility_type
compute_visibility (basetype_path, field)
     tree basetype_path, field;
{
  enum visibility_type visibility = visibility_public;
  tree types;
  tree context = DECL_CLASS_CONTEXT (field);

  /* Fields coming from nested anonymous unions have their DECL_CLASS_CONTEXT
     slot set to the union type rather than the record type containing
     the anonymous union.  In this case, DECL_FIELD_CONTEXT is correct.  */
  if (context && TREE_CODE (context) == UNION_TYPE
      && ANON_AGGRNAME_P (TYPE_IDENTIFIER (context)))
    context = DECL_FIELD_CONTEXT (field);

  /* Virtual function tables are never private.
     But we should know that we are looking for this,
     and not even try to hide it.  */
  if (DECL_NAME (field) && VFIELD_NAME_P (DECL_NAME (field)) == 1)
    return visibility_public;

  /* Member function manipulating its own members.  */
  if (current_class_type == context
      || (context && current_class_type == TYPE_MAIN_VARIANT (context)))
    PUBLIC_RETURN;

  /* Make these special cases fast: reuse a previously cached decision
     recorded by the *_RETURN macros.  */
  if (BINFO_TYPE (basetype_path) == current_class_type)
    {
      if (DECL_PUBLIC (field))
	return visibility_public;
      if (DECL_PROTECTED (field))
	return visibility_protected;
      if (DECL_PRIVATE (field))
	return visibility_private;
    }

  /* Member found immediately within object.  */
  if (BINFO_INHERITANCE_CHAIN (basetype_path) == NULL_TREE)
    {
      /* At object's top level, public members are public.  */
      if (TREE_PROTECTED (field) == 0 && TREE_PRIVATE (field) == 0)
	PUBLIC_RETURN;

      /* Friend function manipulating members it gets (for being a friend).  */
      if (is_friend (context, current_function_decl))
	PUBLIC_RETURN;

      /* Inner than that, without special visibility,

	 protected members are ok if type of object is current_class_type
	 or is derived therefrom.  This means that if the type of the object
	 is a base type for our current class type, we cannot access
	 protected members.

	 private members are not ok.  */
      if (current_class_type && DECL_VISIBILITY (field) == NULL_TREE)
	{
	  if (TREE_PRIVATE (field))
	    PRIVATE_RETURN;

	  if (TREE_PROTECTED (field))
	    {
	      if (context == current_class_type
		  || UNIQUELY_DERIVED_FROM_P (context, current_class_type))
		PUBLIC_RETURN;
	      else
		PROTECTED_RETURN;
	    }
	  else my_friendly_abort (94);
	}
    }
  /* Friend function manipulating members it gets (for being a friend).  */
  if (is_friend (context, current_function_decl))
    PUBLIC_RETURN;

  /* must reverse more than one element -- from here until `ret',
     BASETYPE_PATH is in base-before-derived order and must be reversed
     back before returning.  */
  basetype_path = reverse_path (basetype_path);
  types = basetype_path;

  /* Walk the (reversed) path, tightening VISIBILITY as non-public
     inheritance links are crossed.  */
  while (types)
    {
      tree member;
      tree binfo = types;
      tree type = BINFO_TYPE (binfo);

      /* A per-type visibility override (e.g. access declaration)
	 recorded on the field takes precedence.  */
      member = purpose_member (type, DECL_VISIBILITY (field));
      if (member)
	{
	  visibility = (enum visibility_type)TREE_VALUE (member);
	  if (visibility == visibility_public
	      || is_friend (type, current_function_decl)
	      || (visibility == visibility_protected
		  && current_class_type
		  && UNIQUELY_DERIVED_FROM_P (context, current_class_type)))
	    visibility = visibility_public;
	  goto ret;
	}

      /* Friends inherit the visibility of the class they inherit from.  */
      if (is_friend (type, current_function_decl))
	{
	  if (type == context)
	    {
	      visibility = visibility_public;
	      goto ret;
	    }
	  if (TREE_PROTECTED (field))
	    {
	      visibility = visibility_public;
	      goto ret;
	    }
#if 0
	  /* This short-cut is too short.  */
	  if (visibility == visibility_public)
	    goto ret;
#endif
	  /* else, may be a friend of a deeper base class */
	}

      if (type == context)
	break;

      types = BINFO_INHERITANCE_CHAIN (types);
      /* If the next type was not VIA_PUBLIC, then fields of all
	 remaining class past that one are private.  */
      if (types)
	{
	  if (TREE_VIA_PROTECTED (types))
	    visibility = visibility_protected;
	  else if (! TREE_VIA_PUBLIC (types))
	    visibility = visibility_private;
	}
    }

  /* No special visibilities apply.  Use normal rules.
     No assignment needed for BASETYPEs here from the nreverse.
     This is because we use it only for information about the
     path to the base.  The code earlier dealt with what
     happens when we are at the base level.  */

  if (visibility == visibility_public)
    {
      basetype_path = reverse_path (basetype_path);
      if (TREE_PRIVATE (field))
	PRIVATE_RETURN;
      if (TREE_PROTECTED (field))
	{
	  /* Used to check if the current class type was derived from
	     the type that contains the field.  This is wrong for
	     multiple inheritance because it gives one class reference
	     to protected members via another class's protected path.
	     I.e., if A; B1 : A; B2 : A; Then B1 and B2 can access
	     their own members which are protected in A, but not
	     those same members in one another.  */
	  if (current_class_type
	      && UNIQUELY_DERIVED_FROM_P (context, current_class_type))
	    PUBLIC_RETURN;
	  PROTECTED_RETURN;
	}
      PUBLIC_RETURN;
    }

  if (visibility == visibility_protected)
    {
      /* reverse_path? -- NOTE(review): unlike the public case above,
	 the path is not reversed back on these returns; confirm
	 whether callers tolerate that.  */
      if (TREE_PRIVATE (field))
	PRIVATE_RETURN;
      /* We want to make sure that all non-private members in
	 the current class (as derived) are accessible.  */
      if (current_class_type
	  && UNIQUELY_DERIVED_FROM_P (context, current_class_type))
	PUBLIC_RETURN;
      PROTECTED_RETURN;
    }

  if (visibility == visibility_private
      && current_class_type != NULL_TREE)
    {
      if (TREE_PRIVATE (field))
	{
	  reverse_path (basetype_path);
	  PRIVATE_RETURN;
	}

      /* See if the field isn't protected.  */
      if (TREE_PROTECTED (field))
	{
	  tree test = basetype_path;
	  while (test)
	    {
	      if (BINFO_TYPE (test) == current_class_type)
		break;
	      test = BINFO_INHERITANCE_CHAIN (test);
	    }
	  reverse_path (basetype_path);
	  if (test)
	    PUBLIC_RETURN;
	  PROTECTED_RETURN;
	}

      /* See if the field isn't a public member of
	 a private base class.  */

      visibility = visibility_public;
      types = BINFO_INHERITANCE_CHAIN (basetype_path);
      while (types)
	{
	  if (! TREE_VIA_PUBLIC (types))
	    {
	      /* NOTE(review): VISIBILITY was just reset to public, so
		 on the first non-public link this inner test is always
		 false; the `goto ret' only triggers on a second
		 non-public link.  Looks like "two private links ->
		 definitely private" -- confirm the intent.  */
	      if (visibility == visibility_private)
		{
		  visibility = visibility_private;
		  goto ret;
		}
	      visibility = visibility_private;
	    }
	  if (BINFO_TYPE (types) == context)
	    {
	      visibility = visibility_public;
	      goto ret;
	    }
	  types = BINFO_INHERITANCE_CHAIN (types);
	}
      my_friendly_abort (95);
    }

 ret:
  /* Restore BASETYPE_PATH to its original orientation and record the
     decision on FIELD for the fast path above.  */
  reverse_path (basetype_path);

  if (visibility == visibility_public)
    DECL_PUBLIC (field) = 1;
  else if (visibility == visibility_protected)
    DECL_PROTECTED (field) = 1;
  else if (visibility == visibility_private)
    DECL_PRIVATE (field) = 1;
  else my_friendly_abort (96);
  return visibility;
}
1626 | ||
1627 | /* Routine to see if the sub-object denoted by the binfo PARENT can be | |
1628 | found as a base class and sub-object of the object denoted by | |
1629 | BINFO. This routine relies upon binfos not being shared, except | |
1630 | for binfos for virtual bases. */ | |
1631 | static int | |
1632 | is_subobject_of_p (parent, binfo) | |
1633 | tree parent, binfo; | |
1634 | { | |
1635 | tree binfos = BINFO_BASETYPES (binfo); | |
1636 | int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0; | |
1637 | ||
1638 | if (parent == binfo) | |
1639 | return 1; | |
1640 | ||
1641 | /* Process and/or queue base types. */ | |
1642 | for (i = 0; i < n_baselinks; i++) | |
1643 | { | |
1644 | tree base_binfo = TREE_VEC_ELT (binfos, i); | |
1645 | if (is_subobject_of_p (parent, base_binfo)) | |
1646 | return 1; | |
1647 | } | |
1648 | return 0; | |
1649 | } | |
1650 | ||
1651 | /* See if a one FIELD_DECL hides another. This routine is meant to | |
1652 | correspond to ANSI working paper Sept 17, 1992 10p4. The two | |
1653 | binfos given are the binfos corresponding to the particular places | |
1654 | the FIELD_DECLs are found. This routine relies upon binfos not | |
1655 | being shared, except for virtual bases. */ | |
static int
hides (hider_binfo, hidee_binfo)
     tree hider_binfo, hidee_binfo;
{
  /* hider hides hidee, if hider has hidee as a base class and
     the instance of hidee is a sub-object of hider.  The first
     part is always true if the second part is true.

     When hider and hidee are the same (two ways to get to the exact
     same member) we consider either one as hiding the other.  */
  return is_subobject_of_p (hidee_binfo, hider_binfo);
}
1668 | ||
1669 | /* Very similar to lookup_fnfields_1 but it ensures that at least one | |
1670 | function was declared inside the class given by TYPE. It really should | |
1671 | only return functions that match the given TYPE. */ | |
1672 | static int | |
1673 | lookup_fnfields_here (type, name) | |
1674 | tree type, name; | |
1675 | { | |
1676 | int index = lookup_fnfields_1 (type, name); | |
1677 | tree fndecls; | |
1678 | ||
1679 | if (index <= 0) | |
1680 | return index; | |
1681 | fndecls = TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (type), index); | |
1682 | while (fndecls) | |
1683 | { | |
1684 | if (DECL_CLASS_CONTEXT (fndecls) == type) | |
1685 | return index; | |
1686 | fndecls = TREE_CHAIN (fndecls); | |
1687 | } | |
1688 | return -1; | |
1689 | } | |
1690 | ||
1691 | /* Look for a field named NAME in an inheritance lattice dominated by | |
1692 | XBASETYPE. PROTECT is zero if we can avoid computing visibility | |
1693 | information, otherwise it is 1. WANT_TYPE is 1 when we should only | |
1694 | return TYPE_DECLs, if no TYPE_DECL can be found return NULL_TREE. | |
1695 | ||
1696 | It was not clear what should happen if WANT_TYPE is set, and an | |
1697 | ambiguity is found. At least one use (lookup_name) to not see | |
1698 | the error. */ | |
1699 | tree | |
1700 | lookup_field (xbasetype, name, protect, want_type) | |
1701 | register tree xbasetype, name; | |
1702 | int protect, want_type; | |
1703 | { | |
1704 | int head = 0, tail = 0; | |
1705 | tree rval, rval_binfo = NULL_TREE; | |
1706 | tree type, basetype_chain, basetype_path; | |
1707 | enum visibility_type this_v = visibility_default; | |
1708 | tree entry, binfo; | |
1709 | enum visibility_type own_visibility = visibility_default; | |
1710 | int vbase_name_p = VBASE_NAME_P (name); | |
1711 | ||
1712 | /* rval_binfo is the binfo associated with the found member, note, | |
1713 | this can be set with useful information, even when rval is not | |
1714 | set, because it must deal with ALL members, not just non-function | |
1715 | members. It is used for ambiguity checking and the hidden | |
1716 | checks. Whereas rval is only set if a proper (not hidden) | |
1717 | non-function member is found. */ | |
1718 | ||
1719 | /* Things for memoization. */ | |
1720 | char *errstr = 0; | |
1721 | ||
1722 | /* Set this to nonzero if we don't know how to compute | |
1723 | accurate error messages for visibility. */ | |
1724 | int index = MEMOIZED_HASH_FN (name); | |
1725 | ||
1726 | if (TREE_CODE (xbasetype) == TREE_VEC) | |
1727 | basetype_path = xbasetype, type = BINFO_TYPE (xbasetype); | |
1728 | else if (IS_AGGR_TYPE_CODE (TREE_CODE (xbasetype))) | |
1729 | basetype_path = TYPE_BINFO (xbasetype), type = xbasetype; | |
1730 | else my_friendly_abort (97); | |
1731 | ||
1732 | if (CLASSTYPE_MTABLE_ENTRY (type)) | |
1733 | { | |
1734 | tree tem = MEMOIZED_FIELDS (CLASSTYPE_MTABLE_ENTRY (type), index); | |
1735 | ||
1736 | while (tem && TREE_PURPOSE (tem) != name) | |
1737 | { | |
1738 | memoized_fields_searched[0]++; | |
1739 | tem = TREE_CHAIN (tem); | |
1740 | } | |
1741 | if (tem) | |
1742 | { | |
1743 | if (protect && TREE_TYPE (tem)) | |
1744 | { | |
1745 | error (TREE_STRING_POINTER (TREE_TYPE (tem)), | |
1746 | IDENTIFIER_POINTER (name), | |
1747 | TYPE_NAME_STRING (DECL_FIELD_CONTEXT (TREE_VALUE (tem)))); | |
1748 | return error_mark_node; | |
1749 | } | |
1750 | if (TREE_VALUE (tem) == NULL_TREE) | |
1751 | memoized_fast_rejects[0] += 1; | |
1752 | else | |
1753 | memoized_fast_finds[0] += 1; | |
1754 | return TREE_VALUE (tem); | |
1755 | } | |
1756 | } | |
1757 | ||
1758 | #ifdef GATHER_STATISTICS | |
1759 | n_calls_lookup_field++; | |
1760 | #endif | |
1761 | if (protect && flag_memoize_lookups && ! global_bindings_p ()) | |
1762 | entry = make_memoized_table_entry (type, name, 0); | |
1763 | else | |
1764 | entry = 0; | |
1765 | ||
1766 | rval = lookup_field_1 (type, name); | |
1767 | if (rval || lookup_fnfields_here (type, name)>=0) | |
1768 | rval_binfo = basetype_path; | |
1769 | ||
1770 | if (rval && TREE_CODE (rval) != TYPE_DECL && want_type) | |
1771 | rval = NULL_TREE; | |
1772 | ||
1773 | if (rval) | |
1774 | { | |
1775 | if (protect) | |
1776 | { | |
1777 | if (TREE_PRIVATE (rval) | TREE_PROTECTED (rval)) | |
1778 | this_v = compute_visibility (basetype_path, rval); | |
1779 | if (TREE_CODE (rval) == CONST_DECL) | |
1780 | { | |
1781 | if (this_v == visibility_private) | |
1782 | errstr = "enum `%s' is a private value of class `%s'"; | |
1783 | else if (this_v == visibility_protected) | |
1784 | errstr = "enum `%s' is a protected value of class `%s'"; | |
1785 | } | |
1786 | else | |
1787 | { | |
1788 | if (this_v == visibility_private) | |
1789 | errstr = "member `%s' is a private member of class `%s'"; | |
1790 | else if (this_v == visibility_protected) | |
1791 | errstr = "member `%s' is a protected member of class `%s'"; | |
1792 | } | |
1793 | } | |
1794 | ||
1795 | if (entry) | |
1796 | { | |
1797 | if (errstr) | |
1798 | { | |
1799 | /* This depends on behavior of lookup_field_1! */ | |
1800 | tree error_string = my_build_string (errstr); | |
1801 | TREE_TYPE (entry) = error_string; | |
1802 | } | |
1803 | else | |
1804 | { | |
1805 | /* Let entry know there is no problem with this access. */ | |
1806 | TREE_TYPE (entry) = NULL_TREE; | |
1807 | } | |
1808 | TREE_VALUE (entry) = rval; | |
1809 | } | |
1810 | ||
1811 | if (errstr && protect) | |
1812 | { | |
1813 | error (errstr, IDENTIFIER_POINTER (name), TYPE_NAME_STRING (type)); | |
1814 | return error_mark_node; | |
1815 | } | |
1816 | return rval; | |
1817 | } | |
1818 | ||
1819 | basetype_chain = CLASSTYPE_BINFO_AS_LIST (type); | |
1820 | TREE_VIA_PUBLIC (basetype_chain) = 1; | |
1821 | ||
1822 | /* The ambiguity check relies upon breadth first searching. */ | |
1823 | ||
1824 | search_stack = push_search_level (search_stack, &search_obstack); | |
1825 | BINFO_VIA_PUBLIC (basetype_path) = 1; | |
1826 | BINFO_INHERITANCE_CHAIN (basetype_path) = NULL_TREE; | |
1827 | binfo = basetype_path; | |
1828 | ||
1829 | while (1) | |
1830 | { | |
1831 | tree binfos = BINFO_BASETYPES (binfo); | |
1832 | int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0; | |
1833 | tree nval; | |
1834 | ||
1835 | /* Process and/or queue base types. */ | |
1836 | for (i = 0; i < n_baselinks; i++) | |
1837 | { | |
1838 | tree base_binfo = TREE_VEC_ELT (binfos, i); | |
1839 | if (BINFO_FIELDS_MARKED (base_binfo) == 0) | |
1840 | { | |
1841 | tree btypes; | |
1842 | ||
1843 | SET_BINFO_FIELDS_MARKED (base_binfo); | |
1844 | btypes = my_tree_cons (NULL_TREE, base_binfo, basetype_chain); | |
1845 | TREE_VIA_PUBLIC (btypes) = TREE_VIA_PUBLIC (base_binfo); | |
1846 | TREE_VIA_PROTECTED (btypes) = TREE_VIA_PROTECTED (base_binfo); | |
1847 | TREE_VIA_VIRTUAL (btypes) = TREE_VIA_VIRTUAL (base_binfo); | |
1848 | obstack_ptr_grow (&search_obstack, btypes); | |
1849 | tail += 1; | |
1850 | if (tail >= search_stack->limit) | |
1851 | my_friendly_abort (98); | |
1852 | } | |
1853 | } | |
1854 | ||
1855 | /* Process head of queue, if one exists. */ | |
1856 | if (head >= tail) | |
1857 | break; | |
1858 | ||
1859 | basetype_chain = search_stack->first[head++]; | |
1860 | basetype_path = TREE_VALUE (basetype_chain); | |
1861 | if (TREE_CHAIN (basetype_chain)) | |
1862 | BINFO_INHERITANCE_CHAIN (basetype_path) = TREE_VALUE (TREE_CHAIN (basetype_chain)); | |
1863 | else | |
1864 | BINFO_INHERITANCE_CHAIN (basetype_path) = NULL_TREE; | |
1865 | ||
1866 | binfo = basetype_path; | |
1867 | type = BINFO_TYPE (binfo); | |
1868 | ||
1869 | /* See if we can find NAME in TYPE. If RVAL is nonzero, | |
1870 | and we do find NAME in TYPE, verify that such a second | |
1871 | sighting is in fact legal. */ | |
1872 | ||
1873 | nval = lookup_field_1 (type, name); | |
1874 | ||
1875 | if (nval || lookup_fnfields_here (type, name)>=0) | |
1876 | { | |
1877 | if (rval_binfo && hides (rval_binfo, binfo)) | |
1878 | { | |
1879 | /* This is ok, the member found is in rval_binfo, not | |
1880 | here (binfo). */ | |
1881 | } | |
1882 | else if (rval_binfo==NULL_TREE || hides (binfo, rval_binfo)) | |
1883 | { | |
1884 | /* This is ok, the member found is here (binfo), not in | |
1885 | rval_binfo. */ | |
1886 | if (nval) | |
1887 | { | |
1888 | rval = nval; | |
1889 | if (entry || protect) | |
1890 | this_v = compute_visibility (basetype_path, rval); | |
1891 | /* These may look ambiguous, but they really are not. */ | |
1892 | if (vbase_name_p) | |
1893 | break; | |
1894 | } | |
1895 | else | |
1896 | { | |
1897 | /* Undo finding it before, as something else hides it. */ | |
1898 | rval = NULL_TREE; | |
1899 | } | |
1900 | rval_binfo = binfo; | |
1901 | } | |
1902 | else | |
1903 | { | |
1904 | /* This is ambiguous. */ | |
1905 | errstr = "request for member `%s' is ambiguous"; | |
1906 | protect = 2; | |
1907 | break; | |
1908 | } | |
1909 | } | |
1910 | } | |
1911 | { | |
1912 | tree *tp = search_stack->first; | |
1913 | tree *search_tail = tp + tail; | |
1914 | ||
1915 | if (entry) | |
1916 | TREE_VALUE (entry) = rval; | |
1917 | ||
1918 | if (want_type && (rval == NULL_TREE || TREE_CODE (rval) != TYPE_DECL)) | |
1919 | { | |
1920 | rval = NULL_TREE; | |
1921 | errstr = 0; | |
1922 | } | |
1923 | ||
1924 | /* If this FIELD_DECL defines its own visibility, deal with that. */ | |
1925 | if (rval && errstr == 0 | |
1926 | && ((protect&1) || entry) | |
1927 | && DECL_LANG_SPECIFIC (rval) | |
1928 | && DECL_VISIBILITY (rval)) | |
1929 | { | |
1930 | while (tp < search_tail) | |
1931 | { | |
1932 | /* If is possible for one of the derived types on the | |
1933 | path to have defined special visibility for this | |
1934 | field. Look for such declarations and report an | |
1935 | error if a conflict is found. */ | |
1936 | enum visibility_type new_v; | |
1937 | ||
1938 | if (this_v != visibility_default) | |
1939 | new_v = compute_visibility (TREE_VALUE (*tp), rval); | |
1940 | if (this_v != visibility_default && new_v != this_v) | |
1941 | { | |
1942 | errstr = "conflicting visibilities to member `%s'"; | |
1943 | this_v = visibility_default; | |
1944 | } | |
1945 | own_visibility = new_v; | |
1946 | CLEAR_BINFO_FIELDS_MARKED (TREE_VALUE (*tp)); | |
1947 | tp += 1; | |
1948 | } | |
1949 | } | |
1950 | else | |
1951 | { | |
1952 | while (tp < search_tail) | |
1953 | { | |
1954 | CLEAR_BINFO_FIELDS_MARKED (TREE_VALUE (*tp)); | |
1955 | tp += 1; | |
1956 | } | |
1957 | } | |
1958 | } | |
1959 | search_stack = pop_search_level (search_stack); | |
1960 | ||
1961 | if (errstr == 0) | |
1962 | { | |
1963 | if (own_visibility == visibility_private) | |
1964 | errstr = "member `%s' declared private"; | |
1965 | else if (own_visibility == visibility_protected) | |
1966 | errstr = "member `%s' declared protected"; | |
1967 | else if (this_v == visibility_private) | |
1968 | errstr = TREE_PRIVATE (rval) | |
1969 | ? "member `%s' is private" | |
1970 | : "member `%s' is from private base class"; | |
1971 | else if (this_v == visibility_protected) | |
1972 | errstr = TREE_PROTECTED (rval) | |
1973 | ? "member `%s' is protected" | |
1974 | : "member `%s' is from protected base class"; | |
1975 | } | |
1976 | ||
1977 | if (entry) | |
1978 | { | |
1979 | if (errstr) | |
1980 | { | |
1981 | tree error_string = my_build_string (errstr); | |
1982 | /* Save error message with entry. */ | |
1983 | TREE_TYPE (entry) = error_string; | |
1984 | } | |
1985 | else | |
1986 | { | |
1987 | /* Mark entry as having no error string. */ | |
1988 | TREE_TYPE (entry) = NULL_TREE; | |
1989 | } | |
1990 | } | |
1991 | ||
1992 | if (errstr && protect) | |
1993 | { | |
1994 | error (errstr, IDENTIFIER_POINTER (name), TYPE_NAME_STRING (type)); | |
1995 | rval = error_mark_node; | |
1996 | } | |
1997 | return rval; | |
1998 | } | |
1999 | ||
/* Try to find NAME inside a nested class, climbing outward from
   CURRENT_CLASS_TYPE through each enclosing RECORD_TYPE.  Returns the
   member decl if one is found, error-marked for a disallowed use, or
   NULL_TREE if NAME does not name a member of any enclosing class.
   COMPLAIN nonzero means diagnose a reference to a non-static member
   of an enclosing class; COMPLAIN zero means skip such members
   silently (used at parse time, see the comment in the body).  */
tree
lookup_nested_field (name, complain)
     tree name;
     int complain;
{
  register tree t;

  tree id = NULL_TREE;
  if (TREE_CHAIN (current_class_type))
    {
      /* Climb our way up the nested ladder, seeing if we're trying to
	 modify a field in an enclosing class.  If so, we should only
	 be able to modify if it's static.  */
      for (t = TREE_CHAIN (current_class_type);
	   t && DECL_CONTEXT (t);
	   t = TREE_CHAIN (DECL_CONTEXT (t)))
	{
	  if (TREE_CODE (DECL_CONTEXT (t)) != RECORD_TYPE)
	    break;

	  /* N.B.: lookup_field will do the visibility checking for us */
	  id = lookup_field (DECL_CONTEXT (t), name, complain, 0);
	  if (id == error_mark_node)
	    {
	      /* Inaccessible in this class; keep climbing outward.  */
	      id = NULL_TREE;
	      continue;
	    }

	  if (id != NULL_TREE)
	    {
	      /* Found a member; a non-static field of an enclosing
		 class cannot be referenced from here.  */
	      if (TREE_CODE (id) == FIELD_DECL
		  && ! TREE_STATIC (id)
		  && TREE_TYPE (id) != error_mark_node)
		{
		  if (complain)
		    {
		      /* At parse time, we don't want to give this error, since
			 we won't have enough state to make this kind of
			 decision properly.  But there are times (e.g., with
			 enums in nested classes) when we do need to call
			 this fn at parse time.  So, in those cases, we pass
			 complain as a 0 and just return a NULL_TREE.  */
		      error ("assignment to non-static member `%s' of enclosing class `%s'",
			     lang_printable_name (id),
			     IDENTIFIER_POINTER (TYPE_IDENTIFIER
						 (DECL_CONTEXT (t))));
		      /* Mark this for do_identifier().  It would otherwise
			 claim that the variable was undeclared.  */
		      TREE_TYPE (id) = error_mark_node;
		    }
		  else
		    {
		      id = NULL_TREE;
		      continue;
		    }
		}
	      break;
	    }
	}
    }

  return id;
}
2064 | ||
/* TYPE is a class type.  Return the index of the field within
   the method vector with name NAME, or -1 if no such field exists.  */
2067 | static int | |
2068 | lookup_fnfields_1 (type, name) | |
2069 | tree type, name; | |
2070 | { | |
2071 | register tree method_vec = CLASSTYPE_METHOD_VEC (type); | |
2072 | ||
2073 | if (method_vec != 0) | |
2074 | { | |
2075 | register tree *methods = &TREE_VEC_ELT (method_vec, 0); | |
2076 | register tree *end = TREE_VEC_END (method_vec); | |
2077 | ||
2078 | #ifdef GATHER_STATISTICS | |
2079 | n_calls_lookup_fnfields_1++; | |
2080 | #endif | |
2081 | if (*methods && name == constructor_name (type)) | |
2082 | return 0; | |
2083 | ||
2084 | while (++methods != end) | |
2085 | { | |
2086 | #ifdef GATHER_STATISTICS | |
2087 | n_outer_fields_searched++; | |
2088 | #endif | |
2089 | if (DECL_NAME (*methods) == name) | |
2090 | break; | |
2091 | } | |
2092 | if (methods != end) | |
2093 | return methods - &TREE_VEC_ELT (method_vec, 0); | |
2094 | } | |
2095 | ||
2096 | return -1; | |
2097 | } | |
2098 | ||
2099 | /* Starting from BASETYPE, return a TREE_BASELINK-like object | |
2100 | which gives the following information (in a list): | |
2101 | ||
2102 | TREE_TYPE: list of basetypes needed to get to... | |
2103 | TREE_VALUE: list of all functions in of given type | |
2104 | which have name NAME. | |
2105 | ||
2106 | No visibility information is computed by this function, | |
   other than to adorn the list of basetypes with
2108 | TREE_VIA_PUBLIC. | |
2109 | ||
2110 | If there are two ways to find a name (two members), if COMPLAIN is | |
2111 | non-zero, then error_mark_node is returned, and an error message is | |
2112 | printed, otherwise, just an error_mark_node is returned. | |
2113 | ||
   As a special case, if COMPLAIN is -1, we don't complain, and we
2115 | don't return error_mark_node, but rather the complete list of | |
2116 | virtuals. This is used by get_virtuals_named_this. */ | |
tree
lookup_fnfields (basetype_path, name, complain)
     tree basetype_path, name;
     int complain;
{
  int head = 0, tail = 0;
  tree type, rval, rval_binfo = NULL_TREE, rvals = NULL_TREE;
  tree entry, binfo, basetype_chain;
  int find_all = 0;

  /* rval_binfo is the binfo associated with the found member, note,
     this can be set with useful information, even when rval is not
     set, because it must deal with ALL members, not just function
     members.  It is used for ambiguity checking and the hidden
     checks.  Whereas rval is only set if a proper (not hidden)
     function member is found.  */

  /* For now, don't try this.  */
  int protect = complain;

  /* Error format string; set if the lookup turns out ill-formed.  */
  char *errstr = 0;

  /* Hash slot for the memoization table.  NOTE: re-used below as the
     method-vector index returned by lookup_fnfields_here, and shadowed
     by another `index' inside the search loop.  */
  int index = MEMOIZED_HASH_FN (name);

  /* COMPLAIN == -1 means: collect every match (find_all) rather than
     stopping at the dominant one, and never complain.  */
  if (complain == -1)
    {
      find_all = 1;
      protect = complain = 0;
    }

  binfo = basetype_path;
  type = BINFO_TYPE (basetype_path);

  /* The memoization code is in need of maintenance.  */
  if (!find_all && CLASSTYPE_MTABLE_ENTRY (type))
    {
      tree tem = MEMOIZED_FNFIELDS (CLASSTYPE_MTABLE_ENTRY (type), index);

      while (tem && TREE_PURPOSE (tem) != name)
	{
	  memoized_fields_searched[1]++;
	  tem = TREE_CHAIN (tem);
	}
      if (tem)
	{
	  /* A non-null TREE_TYPE is a saved visibility-error message.  */
	  if (protect && TREE_TYPE (tem))
	    {
	      error (TREE_STRING_POINTER (TREE_TYPE (tem)),
		     IDENTIFIER_POINTER (name),
		     TYPE_NAME_STRING (DECL_CLASS_CONTEXT (TREE_VALUE (TREE_VALUE (tem)))));
	      return error_mark_node;
	    }
	  if (TREE_VALUE (tem) == NULL_TREE)
	    {
	      memoized_fast_rejects[1] += 1;
	      return NULL_TREE;
	    }
	  else
	    {
	      /* Want to return this, but we must make sure
		 that visibility information is consistent.  */
	      tree baselink = TREE_VALUE (tem);
	      tree memoized_basetypes = TREE_PURPOSE (baselink);
	      tree these_basetypes = basetype_path;
	      while (memoized_basetypes && these_basetypes)
		{
		  memoized_fields_searched[1]++;
		  if (TREE_VALUE (memoized_basetypes) != these_basetypes)
		    break;
		  memoized_basetypes = TREE_CHAIN (memoized_basetypes);
		  these_basetypes = BINFO_INHERITANCE_CHAIN (these_basetypes);
		}
	      /* The following statement is true only when both are NULL.  */
	      if (memoized_basetypes == these_basetypes)
		{
		  memoized_fast_finds[1] += 1;
		  return TREE_VALUE (tem);
		}
	      /* else, we must re-find this field by hand.  */
	      baselink = tree_cons (basetype_path, TREE_VALUE (baselink), TREE_CHAIN (baselink));
	      return baselink;
	    }
	}
    }

#ifdef GATHER_STATISTICS
  n_calls_lookup_fnfields++;
#endif
  /* Build a fresh memoization entry, if memoization is in effect.  */
  if (protect && flag_memoize_lookups && ! global_bindings_p ())
    entry = make_memoized_table_entry (type, name, 1);
  else
    entry = 0;

  /* First ply: look in TYPE itself before searching its bases.  */
  index = lookup_fnfields_here (type, name);
  if (index >= 0 || lookup_field_1 (type, name))
    rval_binfo = basetype_path;

  if (index >= 0)
    {
      rval = TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (type), index);
      rvals = my_tree_cons (basetype_path, rval, rvals);
      if (BINFO_BASETYPES (binfo) && CLASSTYPE_BASELINK_VEC (type))
	TREE_TYPE (rvals) = TREE_VEC_ELT (CLASSTYPE_BASELINK_VEC (type), index);

      if (entry)
	{
	  TREE_VALUE (entry) = rvals;
	  TREE_TYPE (entry) = NULL_TREE;
	}

      if (errstr && protect)
	{
	  error (errstr, IDENTIFIER_POINTER (name), TYPE_NAME_STRING (type));
	  return error_mark_node;
	}
      return rvals;
    }
  rval = NULL_TREE;

  basetype_chain = CLASSTYPE_BINFO_AS_LIST (type);
  TREE_VIA_PUBLIC (basetype_chain) = 1;

  /* The ambiguity check relies upon breadth first searching.  */

  search_stack = push_search_level (search_stack, &search_obstack);
  BINFO_VIA_PUBLIC (basetype_path) = 1;
  BINFO_INHERITANCE_CHAIN (basetype_path) = NULL_TREE;
  binfo = basetype_path;

  while (1)
    {
      tree binfos = BINFO_BASETYPES (binfo);
      int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
      /* Shadows the outer `index'; method-vector slot in this TYPE.  */
      int index;

      /* Process and/or queue base types.  */
      for (i = 0; i < n_baselinks; i++)
	{
	  tree base_binfo = TREE_VEC_ELT (binfos, i);
	  if (BINFO_FIELDS_MARKED (base_binfo) == 0)
	    {
	      tree btypes;

	      SET_BINFO_FIELDS_MARKED (base_binfo);
	      btypes = my_tree_cons (NULL_TREE, base_binfo, basetype_chain);
	      TREE_VIA_PUBLIC (btypes) = TREE_VIA_PUBLIC (base_binfo);
	      TREE_VIA_PROTECTED (btypes) = TREE_VIA_PROTECTED (base_binfo);
	      TREE_VIA_VIRTUAL (btypes) = TREE_VIA_VIRTUAL (base_binfo);
	      obstack_ptr_grow (&search_obstack, btypes);
	      tail += 1;
	      if (tail >= search_stack->limit)
		my_friendly_abort (99);
	    }
	}

      /* Process head of queue, if one exists.  */
      if (head >= tail)
	break;

      basetype_chain = search_stack->first[head++];
      basetype_path = TREE_VALUE (basetype_chain);
      if (TREE_CHAIN (basetype_chain))
	BINFO_INHERITANCE_CHAIN (basetype_path) = TREE_VALUE (TREE_CHAIN (basetype_chain));
      else
	BINFO_INHERITANCE_CHAIN (basetype_path) = NULL_TREE;

      binfo = basetype_path;
      type = BINFO_TYPE (binfo);

      /* See if we can find NAME in TYPE.  If RVAL is nonzero,
	 and we do find NAME in TYPE, verify that such a second
	 sighting is in fact legal.  */

      index = lookup_fnfields_here (type, name);

      if (index >= 0 || (lookup_field_1 (type, name)!=NULL_TREE && !find_all))
	{
	  if (rval_binfo && !find_all && hides (rval_binfo, binfo))
	    {
	      /* This is ok, the member found is in rval_binfo, not
		 here (binfo).  */
	    }
	  else if (rval_binfo==NULL_TREE || find_all || hides (binfo, rval_binfo))
	    {
	      /* This is ok, the member found is here (binfo), not in
		 rval_binfo.  */
	      if (index >= 0)
		{
		  rval = TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (type), index);
		  /* Note, rvals can only be previously set if find_all is
		     true.  */
		  rvals = my_tree_cons (basetype_path, rval, rvals);
		  if (TYPE_BINFO_BASETYPES (type)
		      && CLASSTYPE_BASELINK_VEC (type))
		    TREE_TYPE (rvals) = TREE_VEC_ELT (CLASSTYPE_BASELINK_VEC (type), index);
		}
	      else
		{
		  /* Undo finding it before, as something else hides it.  */
		  rval = NULL_TREE;
		  rvals = NULL_TREE;
		}
	      rval_binfo = binfo;
	    }
	  else
	    {
	      /* This is ambiguous.  */
	      errstr = "request for member `%s' is ambiguous";
	      rvals = error_mark_node;
	      break;
	    }
	}
    }
  /* Clear the marks left on the visited binfos.  */
  {
    tree *tp = search_stack->first;
    tree *search_tail = tp + tail;

    while (tp < search_tail)
      {
	CLEAR_BINFO_FIELDS_MARKED (TREE_VALUE (*tp));
	tp += 1;
      }
  }
  search_stack = pop_search_level (search_stack);

  if (entry)
    {
      if (errstr)
	{
	  tree error_string = my_build_string (errstr);
	  /* Save error message with entry.  */
	  TREE_TYPE (entry) = error_string;
	}
      else
	{
	  /* Mark entry as having no error string.  */
	  TREE_TYPE (entry) = NULL_TREE;
	  TREE_VALUE (entry) = rvals;
	}
    }

  if (errstr && protect)
    {
      error (errstr, IDENTIFIER_POINTER (name), TYPE_NAME_STRING (type));
      rvals = error_mark_node;
    }

  return rvals;
}
2369 | \f | |
2370 | /* BREADTH-FIRST SEARCH ROUTINES. */ | |
2371 | ||
2372 | /* Search a multiple inheritance hierarchy by breadth-first search. | |
2373 | ||
2374 | TYPE is an aggregate type, possibly in a multiple-inheritance hierarchy. | |
2375 | TESTFN is a function, which, if true, means that our condition has been met, | |
2376 | and its return value should be returned. | |
2377 | QFN, if non-NULL, is a predicate dictating whether the type should | |
2378 | even be queued. */ | |
2379 | ||
HOST_WIDE_INT
breadth_first_search (binfo, testfn, qfn)
     tree binfo;
     int (*testfn)();
     int (*qfn)();
{
  /* HEAD/TAIL index into the queue grown on search_obstack; entries
     are (binfo, basetype-index) pairs, two slots each.  The integer
     index is smuggled through a tree* slot via casts below.  */
  int head = 0, tail = 0;
  int rval = 0;

  search_stack = push_search_level (search_stack, &search_obstack);

  while (1)
    {
      tree binfos = BINFO_BASETYPES (binfo);
      int n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
      int i;

      /* Process and/or queue base types.  */
      for (i = 0; i < n_baselinks; i++)
	{
	  tree base_binfo = TREE_VEC_ELT (binfos, i);

	  /* Queue each unmarked base that QFN (if given) accepts.  */
	  if (BINFO_MARKED (base_binfo) == 0
	      && (qfn == 0 || (*qfn) (binfo, i)))
	    {
	      SET_BINFO_MARKED (base_binfo);
	      obstack_ptr_grow (&search_obstack, binfo);
	      obstack_ptr_grow (&search_obstack, (HOST_WIDE_INT) i);
	      tail += 2;
	      if (tail >= search_stack->limit)
		my_friendly_abort (100);
	    }
	}
      /* Process head of queue, if one exists.  */
      if (head >= tail)
	{
	  rval = 0;
	  break;
	}

      binfo = search_stack->first[head++];
      i = (int) search_stack->first[head++];
      /* A nonzero TESTFN result ends the search and is returned.  */
      if (rval = (*testfn) (binfo, i))
	break;
      binfo = BINFO_BASETYPE (binfo, i);
    }
  /* Unmark everything that was queued, whether visited or not.  */
  {
    tree *tp = search_stack->first;
    tree *search_tail = tp + tail;
    while (tp < search_tail)
      {
	tree binfo = *tp++;
	int i = (HOST_WIDE_INT)(*tp++);
	CLEAR_BINFO_MARKED (BINFO_BASETYPE (binfo, i));
      }
  }

  search_stack = pop_search_level (search_stack);
  return rval;
}
2440 | ||
2441 | /* Functions to use in breadth first searches. */ | |
2442 | typedef tree (*pft)(); | |
2443 | typedef int (*pfi)(); | |
2444 | ||
2445 | int tree_needs_constructor_p (binfo, i) | |
2446 | tree binfo; | |
2447 | int i; | |
2448 | { | |
2449 | tree basetype; | |
2450 | my_friendly_assert (i != 0, 296); | |
2451 | basetype = BINFO_TYPE (BINFO_BASETYPE (binfo, i)); | |
2452 | return TYPE_NEEDS_CONSTRUCTOR (basetype); | |
2453 | } | |
2454 | ||
2455 | static tree declarator; | |
2456 | ||
2457 | static tree | |
2458 | get_virtuals_named_this (binfo) | |
2459 | tree binfo; | |
2460 | { | |
2461 | tree fields; | |
2462 | ||
2463 | fields = lookup_fnfields (binfo, declarator, -1); | |
2464 | /* fields cannot be error_mark_node */ | |
2465 | ||
2466 | if (fields == 0) | |
2467 | return 0; | |
2468 | ||
2469 | /* Get to the function decls, and return the first virtual function | |
2470 | with this name, if there is one. */ | |
2471 | while (fields) | |
2472 | { | |
2473 | tree fndecl; | |
2474 | ||
2475 | for (fndecl = TREE_VALUE (fields); fndecl; fndecl = DECL_CHAIN (fndecl)) | |
2476 | if (DECL_VINDEX (fndecl)) | |
2477 | return fields; | |
2478 | fields = next_baselink (fields); | |
2479 | } | |
2480 | return NULL_TREE; | |
2481 | } | |
2482 | ||
2483 | static tree get_virtual_destructor (binfo, i) | |
2484 | tree binfo; | |
2485 | int i; | |
2486 | { | |
2487 | tree type = BINFO_TYPE (binfo); | |
2488 | if (i >= 0) | |
2489 | type = BINFO_TYPE (TREE_VEC_ELT (BINFO_BASETYPES (binfo), i)); | |
2490 | if (TYPE_HAS_DESTRUCTOR (type) | |
2491 | && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (type), 0))) | |
2492 | return TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (type), 0); | |
2493 | return 0; | |
2494 | } | |
2495 | ||
2496 | int tree_has_any_destructor_p (binfo, i) | |
2497 | tree binfo; | |
2498 | int i; | |
2499 | { | |
2500 | tree type = BINFO_TYPE (binfo); | |
2501 | if (i >= 0) | |
2502 | type = BINFO_TYPE (TREE_VEC_ELT (BINFO_BASETYPES (binfo), i)); | |
2503 | return TYPE_NEEDS_DESTRUCTOR (type); | |
2504 | } | |
2505 | ||
2506 | /* Given a class type TYPE, and a function decl FNDECL, | |
2507 | look for the first function the TYPE's hierarchy which | |
2508 | FNDECL could match as a virtual function. | |
2509 | ||
2510 | DTORP is nonzero if we are looking for a destructor. Destructors | |
2511 | need special treatment because they do not match by name. */ | |
tree
get_first_matching_virtual (binfo, fndecl, dtorp)
     tree binfo, fndecl;
     int dtorp;
{
  tree tmp = NULL_TREE;

  /* Breadth first search routines start searching basetypes
     of TYPE, so we must perform first ply of search here.  */
  if (dtorp)
    {
      /* Destructor case: match purely by position (they don't match
	 by name).  Try BINFO's own type first, then its bases.  */
      if (tree_has_any_destructor_p (binfo, -1))
	tmp = get_virtual_destructor (binfo, -1);

      if (tmp)
	{
	  /* Pull FNDECL's context up to the base that declared the
	     virtual, when that base dominates the current context.  */
	  if (get_base_distance (DECL_CONTEXT (tmp),
				 DECL_CONTEXT (fndecl), 0, 0) > 0)
	    DECL_CONTEXT (fndecl) = DECL_CONTEXT (tmp);
	  return tmp;
	}

      tmp = (tree) breadth_first_search (binfo,
					 (pfi) get_virtual_destructor,
					 tree_has_any_destructor_p);
      if (tmp)
	{
	  if (get_base_distance (DECL_CONTEXT (tmp),
				 DECL_CONTEXT (fndecl), 0, 0) > 0)
	    DECL_CONTEXT (fndecl) = DECL_CONTEXT (tmp);
	}
      return tmp;
    }
  else
    {
      tree drettype, dtypes, btypes, instptr_type;
      tree basetype = DECL_CLASS_CONTEXT (fndecl);
      tree baselink, best = NULL_TREE;
      tree name = DECL_ASSEMBLER_NAME (fndecl);

      /* DECLARATOR is the file-static name consulted by
	 get_virtuals_named_this.  */
      declarator = DECL_NAME (fndecl);
      if (IDENTIFIER_VIRTUAL_P (declarator) == 0)
	return NULL_TREE;

      drettype = TREE_TYPE (TREE_TYPE (fndecl));
      dtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
      /* For a non-static member, the first argument is the `this'
	 pointer; remember its type so constness can be compared.  */
      if (DECL_STATIC_FUNCTION_P (fndecl))
	instptr_type = NULL_TREE;
      else
	instptr_type = TREE_TYPE (TREE_VALUE (dtypes));

      for (baselink = get_virtuals_named_this (binfo);
	   baselink; baselink = next_baselink (baselink))
	{
	  for (tmp = TREE_VALUE (baselink); tmp; tmp = DECL_CHAIN (tmp))
	    {
	      if (! DECL_VINDEX (tmp))
		continue;

	      btypes = TYPE_ARG_TYPES (TREE_TYPE (tmp));
	      if (instptr_type == NULL_TREE)
		{
		  if (compparms (TREE_CHAIN (btypes), dtypes, 3))
		    /* Caller knows to give error in this case.  */
		    return tmp;
		  return NULL_TREE;
		}

	      /* Candidate matches if `this' constness agrees and the
		 remaining parameter lists are compatible.  */
	      if ((TYPE_READONLY (TREE_TYPE (TREE_VALUE (btypes)))
		   == TYPE_READONLY (instptr_type))
		  && compparms (TREE_CHAIN (btypes), TREE_CHAIN (dtypes), 3))
		{
		  /* Same signature but different return type is an
		     error, reported once per NAME.  */
		  if (IDENTIFIER_ERROR_LOCUS (name) == NULL_TREE
		      && ! comptypes (TREE_TYPE (TREE_TYPE (tmp)), drettype, 1))
		    {
		      error_with_decl (fndecl, "conflicting return type specified for virtual function `%s'");
		      SET_IDENTIFIER_ERROR_LOCUS (name, basetype);
		    }
		  break;
		}
	    }
	  if (tmp)
	    {
	      /* If this is ambiguous, we will warn about it later.  */
	      if (best)
		{
		  /* Prefer the candidate from the more derived class.  */
		  if (get_base_distance (DECL_CLASS_CONTEXT (best),
					 DECL_CLASS_CONTEXT (tmp), 0, 0) > 0)
		    best = tmp;
		}
	      else
		best = tmp;
	    }
	}
      /* Name is virtual somewhere, but nothing matched: warn if asked.  */
      if (IDENTIFIER_ERROR_LOCUS (name) == NULL_TREE
	  && best == NULL_TREE && warn_overloaded_virtual)
	{
	  warning_with_decl (fndecl,
			     "conflicting specification deriving virtual function `%s'");
	  SET_IDENTIFIER_ERROR_LOCUS (name, basetype);
	}
      if (best)
	{
	  if (get_base_distance (DECL_CONTEXT (best),
				 DECL_CONTEXT (fndecl), 0, 0) > 0)
	    DECL_CONTEXT (fndecl) = DECL_CONTEXT (best);
	}
      return best;
    }
}
2622 | ||
2623 | /* Return the list of virtual functions which are abstract in type TYPE. | |
2624 | This information is cached, and so must be built on a | |
2625 | non-temporary obstack. */ | |
tree
get_abstract_virtuals (type)
     tree type;
{
  /* For each layer of base class (i.e., the first base class, and each
     virtual base class from that one), modify the virtual function table
     of the derived class to contain the new virtual function.
     A class has as many vfields as it has virtual base classes (total).  */
  tree vfields, vbases, base, tmp;
  tree vfield = CLASSTYPE_VFIELD (type);
  tree fcontext = vfield ? DECL_FCONTEXT (vfield) : NULL_TREE;
  tree abstract_virtuals = CLASSTYPE_ABSTRACT_VIRTUALS (type);

  /* Pass 1: scan the virtuals reachable through each vfield.  */
  for (vfields = CLASSTYPE_VFIELDS (type); vfields; vfields = TREE_CHAIN (vfields))
    {
      int normal;

      /* This code is most likely wrong, and probably only works for single
	 inheritance or by accident.  */

      /* Find the right base class for this derived class, call it BASE.  */
      base = VF_BASETYPE_VALUE (vfields);
      if (base == type)
	continue;

      /* We call this case NORMAL iff this virtual function table
	 pointer field has its storage reserved in this class.
	 This is normally the case without virtual baseclasses
	 or off-center multiple baseclasses.  */
      normal = (base == fcontext
		&& (VF_BINFO_VALUE (vfields) == NULL_TREE
		    || ! TREE_VIA_VIRTUAL (VF_BINFO_VALUE (vfields))));

      if (normal)
	tmp = TREE_CHAIN (TYPE_BINFO_VIRTUALS (type));
      else
	{
	  /* n.b.: VF_BASETYPE_VALUE (vfields) is the first basetype
	     that provides the virtual function table, whereas
	     VF_DERIVED_VALUE (vfields) is an immediate base type of TYPE
	     that dominates VF_BASETYPE_VALUE (vfields).  The list of
	     vfields we want lies between these two values.  */
	  tree binfo = get_binfo (VF_NORMAL_VALUE (vfields), type, 0);
	  tmp = TREE_CHAIN (BINFO_VIRTUALS (binfo));
	}

      /* Get around dossier entry if there is one.  */
      if (flag_dossier)
	tmp = TREE_CHAIN (tmp);

      /* Collect every abstract (pure virtual) function on this list.  */
      while (tmp)
	{
	  tree base_pfn = FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (tmp));
	  tree base_fndecl = TREE_OPERAND (base_pfn, 0);
	  if (DECL_ABSTRACT_VIRTUAL_P (base_fndecl))
	    abstract_virtuals = tree_cons (NULL_TREE, base_fndecl, abstract_virtuals);
	  tmp = TREE_CHAIN (tmp);
	}
    }
  /* Pass 2: do the same for the virtuals of each virtual base class.  */
  for (vbases = CLASSTYPE_VBASECLASSES (type); vbases; vbases = TREE_CHAIN (vbases))
    {
      if (! BINFO_VIRTUALS (vbases))
	continue;

      tmp = TREE_CHAIN (BINFO_VIRTUALS (vbases));
      while (tmp)
	{
	  tree base_pfn = FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (tmp));
	  tree base_fndecl = TREE_OPERAND (base_pfn, 0);
	  if (DECL_ABSTRACT_VIRTUAL_P (base_fndecl))
	    abstract_virtuals = tree_cons (NULL_TREE, base_fndecl, abstract_virtuals);
	  tmp = TREE_CHAIN (tmp);
	}
    }
  /* Entries were consed on in reverse; restore discovery order.  */
  return nreverse (abstract_virtuals);
}
2702 | ||
/* For the type TYPE, return a list of member functions available from
   base classes with name NAME.  The TREE_VALUE of the list is a chain of
   member functions with name NAME.  The TREE_PURPOSE of the list is a
   basetype, or a list of base types (in reverse order) which were
   traversed to reach the chain of member functions.  If we reach a base
   type which provides a member function of name NAME, and which has at
   most one base type itself, then we can terminate the search.  */

tree
get_baselinks (type_as_binfo_list, type, name)
     tree type_as_binfo_list;
     tree type, name;
{
  /* HEAD/TAIL index a breadth-first work queue of basetype paths kept
     as a growing pointer array on the search obstack.  */
  int head = 0, tail = 0, index;
  tree rval = 0, nval = 0;
  tree basetypes = type_as_binfo_list;
  tree binfo = TYPE_BINFO (type);

  search_stack = push_search_level (search_stack, &search_obstack);

  while (1)
    {
      tree binfos = BINFO_BASETYPES (binfo);
      int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

      /* Process and/or queue base types.  */
      for (i = 0; i < n_baselinks; i++)
	{
	  tree base_binfo = TREE_VEC_ELT (binfos, i);
	  tree btypes;

	  /* Extend the current path with this base and enqueue it.  */
	  btypes = hash_tree_cons (TREE_VIA_PUBLIC (base_binfo),
				   TREE_VIA_VIRTUAL (base_binfo),
				   TREE_VIA_PROTECTED (base_binfo),
				   NULL_TREE, base_binfo,
				   basetypes);
	  obstack_ptr_grow (&search_obstack, btypes);
	  /* The obstack may relocate its object as it grows, so the
	     queue base pointer must be refreshed after each push.  */
	  search_stack->first = (tree *)obstack_base (&search_obstack);
	  tail += 1;
	}

    dont_queue:
      /* Process head of queue, if one exists.  */
      if (head >= tail)
	break;

      basetypes = search_stack->first[head++];
      binfo = TREE_VALUE (basetypes);
      type = BINFO_TYPE (binfo);
      index = lookup_fnfields_1 (type, name);
      if (index >= 0)
	{
	  nval = TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (type), index);
	  rval = hash_tree_cons (0, 0, 0, basetypes, nval, rval);
	  /* Per the comment above: a provider with at most one base
	     lets us cut off the search below this point.  */
	  if (TYPE_BINFO_BASETYPES (type) == 0)
	    goto dont_queue;
	  else if (TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (type)) == 1)
	    {
	      /* Single-inheritance chain: reuse the cached baselink
		 vector for the remainder, if one exists.  */
	      if (CLASSTYPE_BASELINK_VEC (type))
		TREE_TYPE (rval) = TREE_VEC_ELT (CLASSTYPE_BASELINK_VEC (type), index);
	      goto dont_queue;
	    }
	}
      nval = NULL_TREE;
    }

  search_stack = pop_search_level (search_stack);
  return rval;
}
2772 | ||
2773 | tree | |
2774 | next_baselink (baselink) | |
2775 | tree baselink; | |
2776 | { | |
2777 | tree tmp = TREE_TYPE (baselink); | |
2778 | baselink = TREE_CHAIN (baselink); | |
2779 | while (tmp) | |
2780 | { | |
2781 | /* @@ does not yet add previous base types. */ | |
2782 | baselink = tree_cons (TREE_PURPOSE (tmp), TREE_VALUE (tmp), | |
2783 | baselink); | |
2784 | TREE_TYPE (baselink) = TREE_TYPE (tmp); | |
2785 | tmp = TREE_CHAIN (tmp); | |
2786 | } | |
2787 | return baselink; | |
2788 | } | |
2789 | \f | |
2790 | /* DEPTH-FIRST SEARCH ROUTINES. */ | |
2791 | ||
/* Assign unique numbers to _CLASSTYPE members of the lattice
   specified by TYPE.  The root nodes are marked first; the nodes
   are marked depth-first, left-right.  */
2795 | ||
/* Next class id to hand out; see dfs_number/dfs_unnumber below.  */
static int cid;

/* Matrix implementing a relation from CLASSTYPE X CLASSTYPE => INT.
   Relation yields 1 if C1 <= C2, 0 otherwise.  */
typedef char mi_boolean;
static mi_boolean *mi_matrix;

/* Type for which this matrix is defined.  */
static tree mi_type;

/* Size of the matrix for indexing purposes.  */
static int mi_size;

/* Return nonzero if class C2 derives from class C1.  */
#define BINFO_DERIVES_FROM(C1, C2) \
  ((mi_matrix+mi_size*(BINFO_CID (C1)-1))[BINFO_CID (C2)-1])
#define TYPE_DERIVES_FROM(C1, C2) \
  ((mi_matrix+mi_size*(CLASSTYPE_CID (C1)-1))[CLASSTYPE_CID (C2)-1])
/* Pointer into the matrix biased by C's cid; stepping by mi_size from
   here visits the entries involving C (used by dfs_record_inheritance).  */
#define BINFO_DERIVES_FROM_STAR(C) \
  (mi_matrix+(BINFO_CID (C)-1))
2816 | ||
2817 | /* This routine converts a pointer to be a pointer of an immediate | |
2818 | base class. The normal convert_pointer_to routine would diagnose | |
2819 | the conversion as ambiguous, under MI code that has the base class | |
2820 | as an ambiguous base class. */ | |
2821 | static tree | |
2822 | convert_pointer_to_single_level (to_type, expr) | |
2823 | tree to_type, expr; | |
2824 | { | |
2825 | tree binfo_of_derived; | |
2826 | tree last; | |
2827 | ||
2828 | binfo_of_derived = TYPE_BINFO (TREE_TYPE (TREE_TYPE (expr))); | |
2829 | last = get_binfo (to_type, TREE_TYPE (TREE_TYPE (expr)), 0); | |
2830 | BINFO_INHERITANCE_CHAIN (last) = binfo_of_derived; | |
2831 | BINFO_INHERITANCE_CHAIN (binfo_of_derived) = NULL_TREE; | |
2832 | return build_vbase_path (PLUS_EXPR, TYPE_POINTER_TO (to_type), expr, last, 1); | |
2833 | } | |
2834 | ||
/* The main function which implements depth first search.

   Walk the inheritance lattice rooted at BINFO, calling FN on each
   binfo in post-order; QFN is a predicate deciding whether a base is
   descended into at all.

   This routine has to remember the path it walked up, when
   dfs_init_vbase_pointers is the work function, as otherwise there
   would be no record.  */
static void
dfs_walk (binfo, fn, qfn)
     tree binfo;
     void (*fn)();
     int (*qfn)();
{
  tree binfos = BINFO_BASETYPES (binfo);
  int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  for (i = 0; i < n_baselinks; i++)
    {
      tree base_binfo = TREE_VEC_ELT (binfos, i);

      if ((*qfn)(base_binfo))
	{
#define NEW_CONVERT 1
#if NEW_CONVERT
	  /* Special-cased: only this particular work function needs the
	     step-at-a-time pointer maintained across the recursion.  */
	  if (fn == dfs_init_vbase_pointers)
	    {
	      /* When traversing an arbitrary MI hierarchy, we need to keep
		 a record of the path we took to get down to the final base
		 type, as otherwise there would be no record of it, and just
		 trying to blindly convert at the bottom would be ambiguous.

		 The easiest way is to do the conversions one step at a time,
		 as we know we want the immediate base class at each step.

		 The only special trick to converting one step at a time,
		 is that when we hit the last virtual base class, we must
		 use the SLOT value for it, and not use the normal convert
		 routine.  We use the last virtual base class, as in our
		 implementation, we have pointers to all virtual base
		 classes in the base object.  */

	      /* Saved so the pointer can be restored after the recursive
		 walk returns (see below).  */
	      tree saved_vbase_decl_ptr_intermediate
		= vbase_decl_ptr_intermediate;

	      if (TREE_VIA_VIRTUAL (base_binfo))
		{
		  /* No need for the conversion here, as we know it is the
		     right type.  */
		  vbase_decl_ptr_intermediate =
		    (tree)CLASSTYPE_SEARCH_SLOT (BINFO_TYPE (base_binfo));
		}
	      else
		{
#ifdef CHECK_convert_pointer_to_single_level
		  /* This code here introduces a little software fault
		     tolerance.  It should be the case that the second
		     one always gets the same valid answer that the
		     first one gives, if the first one gives a valid
		     answer.

		     If it doesn't, the second algorithm is at fault
		     and needs to be fixed.

		     The first one is known to be bad and produce
		     error_mark_node when dealing with MI base
		     classes.  It is the only problem supposed to be
		     fixed by the second.  */
#endif
		  tree vdpi1, vdpi2;

#ifdef CHECK_convert_pointer_to_single_level
		  vdpi1 = convert_pointer_to (BINFO_TYPE (base_binfo),
					      vbase_decl_ptr_intermediate);
#endif
		  vdpi2 = convert_pointer_to_single_level (BINFO_TYPE (base_binfo),
							   vbase_decl_ptr_intermediate);
		  vbase_decl_ptr_intermediate = vdpi2;
#ifdef CHECK_convert_pointer_to_single_level
		  if (vdpi1 == error_mark_node && vdpi2 != vdpi1)
		    {
		      /* The old algorithm mis-diagnosed an ambiguity;
			 retract its error count and carry on with the
			 new algorithm's answer.  */
		      extern int errorcount;
		      errorcount -=2;
		      warning ("internal: Don't worry, be happy, I can fix tangs man. (ignore above error)");
		    }
		  else if (simple_cst_equal (vdpi1, vdpi2) != 1) {
		    if (simple_cst_equal (vdpi1, vdpi2) == 0)
		      warning ("internal: convert_pointer_to_single_level: They are not the same, going with old algorithm");
		    else
		      warning ("internal: convert_pointer_to_single_level: They might not be the same, going with old algorithm");
		    vbase_decl_ptr_intermediate = vdpi1;
		  }
#endif
		}

	      dfs_walk (base_binfo, fn, qfn);

	      vbase_decl_ptr_intermediate = saved_vbase_decl_ptr_intermediate;
	    } else
#endif
	      dfs_walk (base_binfo, fn, qfn);
	}
    }

  /* Post-order: the work function sees bases before the derived binfo.  */
  fn (binfo);
}
2938 | ||
/* Predicate functions which serve for dfs_walk.  Each tests or negates
   one of the per-binfo mark bits; the bfs_* variants take a binfo plus
   a basetype index, for use by the breadth-first search routines.  */
static int numberedp (binfo) tree binfo;
{ return BINFO_CID (binfo); }
static int unnumberedp (binfo) tree binfo;
{ return BINFO_CID (binfo) == 0; }

static int markedp (binfo) tree binfo;
{ return BINFO_MARKED (binfo); }
static int bfs_markedp (binfo, i) tree binfo; int i;
{ return BINFO_MARKED (BINFO_BASETYPE (binfo, i)); }
static int unmarkedp (binfo) tree binfo;
{ return BINFO_MARKED (binfo) == 0; }
static int bfs_unmarkedp (binfo, i) tree binfo; int i;
{ return BINFO_MARKED (BINFO_BASETYPE (binfo, i)) == 0; }
static int marked_vtable_pathp (binfo) tree binfo;
{ return BINFO_VTABLE_PATH_MARKED (binfo); }
static int bfs_marked_vtable_pathp (binfo, i) tree binfo; int i;
{ return BINFO_VTABLE_PATH_MARKED (BINFO_BASETYPE (binfo, i)); }
static int unmarked_vtable_pathp (binfo) tree binfo;
{ return BINFO_VTABLE_PATH_MARKED (binfo) == 0; }
static int bfs_unmarked_vtable_pathp (binfo, i) tree binfo; int i;
{ return BINFO_VTABLE_PATH_MARKED (BINFO_BASETYPE (binfo, i)) == 0; }
static int marked_new_vtablep (binfo) tree binfo;
{ return BINFO_NEW_VTABLE_MARKED (binfo); }
static int bfs_marked_new_vtablep (binfo, i) tree binfo; int i;
{ return BINFO_NEW_VTABLE_MARKED (BINFO_BASETYPE (binfo, i)); }
static int unmarked_new_vtablep (binfo) tree binfo;
{ return BINFO_NEW_VTABLE_MARKED (binfo) == 0; }
static int bfs_unmarked_new_vtablep (binfo, i) tree binfo; int i;
{ return BINFO_NEW_VTABLE_MARKED (BINFO_BASETYPE (binfo, i)) == 0; }

/* Nonzero when the type's search slot has been filled by dfs_find_vbases.  */
static int dfs_search_slot_nonempty_p (binfo) tree binfo;
{ return CLASSTYPE_SEARCH_SLOT (BINFO_TYPE (binfo)) != 0; }

/* Nonzero when debug info has not yet been requested for the type.  */
static int dfs_debug_unmarkedp (binfo) tree binfo;
{ return CLASSTYPE_DEBUG_REQUESTED (BINFO_TYPE (binfo)) == 0; }
2975 | ||
/* The worker functions for `dfs_walk'.  These do not need to
   test anything (vis a vis marking) if they are paired with
   a predicate function (above).  */

/* Assign each type within the lattice a number which is unique
   in the lattice.  The first number assigned is 1.  */

static void
dfs_number (binfo)
     tree binfo;
{
  BINFO_CID (binfo) = ++cid;
}

/* Reset the lattice numbering assigned by dfs_number.  */
static void
dfs_unnumber (binfo)
     tree binfo;
{
  BINFO_CID (binfo) = 0;
}

/* Setters/clearers for the three per-binfo mark bits, matching the
   predicates above.  */
static void
dfs_mark (binfo) tree binfo;
{ SET_BINFO_MARKED (binfo); }

static void
dfs_unmark (binfo) tree binfo;
{ CLEAR_BINFO_MARKED (binfo); }

static void
dfs_mark_vtable_path (binfo) tree binfo;
{ SET_BINFO_VTABLE_PATH_MARKED (binfo); }

static void
dfs_unmark_vtable_path (binfo) tree binfo;
{ CLEAR_BINFO_VTABLE_PATH_MARKED (binfo); }

static void
dfs_mark_new_vtable (binfo) tree binfo;
{ SET_BINFO_NEW_VTABLE_MARKED (binfo); }

static void
dfs_unmark_new_vtable (binfo) tree binfo;
{ CLEAR_BINFO_NEW_VTABLE_MARKED (binfo); }

/* Empty the per-type search slot set up by dfs_find_vbases.  */
static void
dfs_clear_search_slot (binfo) tree binfo;
{ CLASSTYPE_SEARCH_SLOT (BINFO_TYPE (binfo)) = 0; }
3024 | ||
/* Mark type T (= BINFO_TYPE (BINFO)) as having had debug info requested,
   and decide whether this translation unit must emit that info itself.
   If some other translation unit is bound to define a non-inline virtual
   function of T, it will emit the debug info there and we can skip it.  */
static void
dfs_debug_mark (binfo)
     tree binfo;
{
  tree t = BINFO_TYPE (binfo);

  /* Use heuristic that if there are virtual functions,
     ignore until we see a non-inline virtual function.  */
  tree methods = CLASSTYPE_METHOD_VEC (t);

  CLASSTYPE_DEBUG_REQUESTED (t) = 1;

  /* If interface info is known, the value of (?@@?) is correct.  */
  if (methods == 0
      || ! CLASSTYPE_INTERFACE_UNKNOWN (t)
      || (write_virtuals == 2 && TYPE_VIRTUAL_P (t)))
    return;

  /* If debug info is requested from this context for this type, supply it.
     If debug info is requested from another context for this type,
     see if some third context can supply it.  */
  if (current_function_decl == NULL_TREE
      || DECL_CLASS_CONTEXT (current_function_decl) != t)
    {
      /* Slot 0 vs. slot 1 of the method vector; fall back when the
	 first slot is empty.  */
      if (TREE_VEC_ELT (methods, 0))
	methods = TREE_VEC_ELT (methods, 0);
      else
	methods = TREE_VEC_ELT (methods, 1);
      while (methods)
	{
	  /* Look for a virtual function that is neither inline (no saved
	     insns, no pending inline info) nor pure.  */
	  if (DECL_VINDEX (methods)
	      && DECL_SAVED_INSNS (methods) == 0
	      && DECL_PENDING_INLINE_INFO (methods) == 0
	      && DECL_ABSTRACT_VIRTUAL_P (methods) == 0)
	    {
	      /* Somebody, somewhere is going to have to define this
		 virtual function.  When they do, they will provide
		 the debugging info.  */
	      return;
	    }
	  methods = TREE_CHAIN (methods);
	}
    }
  /* We cannot rely on some alien method to solve our problems,
     so we must write out the debug info ourselves.  */
  DECL_IGNORED_P (TYPE_NAME (t)) = 0;
  if (! TREE_ASM_WRITTEN (TYPE_NAME (t)))
    rest_of_type_compilation (t, global_bindings_p ());
}
3074 | \f | |
3075 | /* Attach to the type of the virtual base class, the pointer to the | |
3076 | virtual base class, given the global pointer vbase_decl_ptr. */ | |
3077 | static void | |
3078 | dfs_find_vbases (binfo) | |
3079 | tree binfo; | |
3080 | { | |
3081 | tree binfos = BINFO_BASETYPES (binfo); | |
3082 | int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0; | |
3083 | ||
3084 | for (i = n_baselinks-1; i >= 0; i--) | |
3085 | { | |
3086 | tree base_binfo = TREE_VEC_ELT (binfos, i); | |
3087 | ||
3088 | if (TREE_VIA_VIRTUAL (base_binfo) | |
3089 | && CLASSTYPE_SEARCH_SLOT (BINFO_TYPE (base_binfo)) == 0) | |
3090 | { | |
3091 | tree vbase = BINFO_TYPE (base_binfo); | |
3092 | tree binfo = binfo_member (vbase, vbase_types); | |
3093 | ||
3094 | CLASSTYPE_SEARCH_SLOT (vbase) | |
3095 | = (char *) build (PLUS_EXPR, TYPE_POINTER_TO (vbase), | |
3096 | vbase_decl_ptr, BINFO_OFFSET (binfo)); | |
3097 | } | |
3098 | } | |
3099 | SET_BINFO_VTABLE_PATH_MARKED (binfo); | |
3100 | SET_BINFO_NEW_VTABLE_MARKED (binfo); | |
3101 | } | |
3102 | ||
/* Worker for dfs_walk: build, onto the global vbase_init_result, the
   assignments that initialize the virtual-base pointer fields of the
   subobject of type BINFO_TYPE (binfo).  Relies on the globals set up
   by init_vbase_pointers, and (under NEW_CONVERT) on the pointer kept
   current by dfs_walk itself.  */
static void
dfs_init_vbase_pointers (binfo)
     tree binfo;
{
  tree type = BINFO_TYPE (binfo);
  tree fields = TYPE_FIELDS (type);
  tree path, this_vbase_ptr;
  int distance;  /* Only used by the old (NEW_CONVERT == 0) algorithm.  */

  CLEAR_BINFO_VTABLE_PATH_MARKED (binfo);

  /* If there is a dossier, it is the first field, though perhaps from
     the base class.  Otherwise, the first fields are virtual base class
     pointer fields.  */
  if (CLASSTYPE_DOSSIER (type) && VFIELD_NAME_P (DECL_NAME (fields)))
    /* Get past vtable for the object.  */
    fields = TREE_CHAIN (fields);

  /* Nothing to do unless the leading fields are vbase pointers.  */
  if (fields == NULL_TREE
      || DECL_NAME (fields) == NULL_TREE
      || ! VBASE_NAME_P (DECL_NAME (fields)))
    return;

#if NEW_CONVERT
  /* dfs_walk converted one level at a time on the way down; the
     intermediate pointer must already have exactly our type.  */
  this_vbase_ptr = vbase_decl_ptr_intermediate;

  if (TYPE_POINTER_TO (type) != TREE_TYPE (this_vbase_ptr))
    my_friendly_abort (125);
#endif

#if NEW_CONVERT == 0
  /* Old algorithm: convert from the outermost object in one shot,
     detouring through a virtual base's search slot when one lies on
     the path.  Known to produce error_mark_node under some MI.  */
  distance = get_base_distance (type, TREE_TYPE (vbase_decl), 0, &path);
  if (distance == -2)
    {
      error ("inheritance lattice too complex below");
    }
  while (path)
    {
      if (TREE_VIA_VIRTUAL (path))
	break;
      distance -= 1;
      path = BINFO_INHERITANCE_CHAIN (path);
    }

  if (distance > 0)
    this_vbase_ptr = convert_pointer_to (type, (tree)CLASSTYPE_SEARCH_SLOT (BINFO_TYPE (path)));
  else
    this_vbase_ptr = convert_pointer_to (type, vbase_decl_ptr);

  /* This happens when it is ambiguous.  */
  if (this_vbase_ptr == error_mark_node)
    return;
#endif

  /* For each leading vbase-pointer field, emit `field = slot-address'
     and cons it onto the accumulated result.  */
  while (fields && DECL_NAME (fields)
	 && VBASE_NAME_P (DECL_NAME (fields)))
    {
      tree ref = build (COMPONENT_REF, TREE_TYPE (fields),
			build_indirect_ref (this_vbase_ptr, 0), fields);
      tree init = (tree)CLASSTYPE_SEARCH_SLOT (TREE_TYPE (TREE_TYPE (fields)));
      vbase_init_result = tree_cons (binfo_member (TREE_TYPE (TREE_TYPE (fields)),
						   vbase_types),
				     build_modify_expr (ref, NOP_EXPR, init),
				     vbase_init_result);
      fields = TREE_CHAIN (fields);
    }
}
3170 | ||
3171 | /* Sometimes this needs to clear both VTABLE_PATH and NEW_VTABLE. Other | |
3172 | times, just NEW_VTABLE, but optimizer should make both with equal | |
3173 | efficiency (though it does not currently). */ | |
3174 | static void | |
3175 | dfs_clear_vbase_slots (binfo) | |
3176 | tree binfo; | |
3177 | { | |
3178 | tree type = BINFO_TYPE (binfo); | |
3179 | CLASSTYPE_SEARCH_SLOT (type) = 0; | |
3180 | CLEAR_BINFO_VTABLE_PATH_MARKED (binfo); | |
3181 | CLEAR_BINFO_NEW_VTABLE_MARKED (binfo); | |
3182 | } | |
3183 | ||
/* Build and return the list of assignments that initialize the virtual
   base pointers of the object pointed to by DECL_PTR, of type TYPE.
   Returns 0 when TYPE has no virtual bases.  Sets up the file-scope
   globals consumed by the dfs_* workers, then runs three walks:
   find/fill the vbase slots, emit the pointer initializations, and
   clear the slots and marks again.  */
tree
init_vbase_pointers (type, decl_ptr)
     tree type;
     tree decl_ptr;
{
  if (TYPE_USES_VIRTUAL_BASECLASSES (type))
    {
      int old_flag = flag_this_is_variable;
      tree binfo = TYPE_BINFO (type);
      /* -2 means `this' is under our control during these walks.  */
      flag_this_is_variable = -2;
      vbase_types = CLASSTYPE_VBASECLASSES (type);
      vbase_decl_ptr = decl_ptr;
      vbase_decl = build_indirect_ref (decl_ptr, 0);
      vbase_decl_ptr_intermediate = vbase_decl_ptr;
      vbase_init_result = NULL_TREE;
      /* Order matters: slots must exist before the init walk reads
	 them, and must be cleared when we are done.  */
      dfs_walk (binfo, dfs_find_vbases, unmarked_vtable_pathp);
      dfs_walk (binfo, dfs_init_vbase_pointers, marked_vtable_pathp);
      dfs_walk (binfo, dfs_clear_vbase_slots, marked_new_vtablep);
      flag_this_is_variable = old_flag;
      return vbase_init_result;
    }
  return 0;
}
3207 | ||
/* Build a COMPOUND_EXPR which when expanded will generate the code
   needed to initialize all the virtual function table slots of all
   the virtual baseclasses.  FOR_TYPE is the type which determines the
   virtual baseclasses to use; TYPE is the type of the object to which
   the initialization applies.  TRUE_EXP is the true object we are
   initializing, and DECL_PTR is the pointer to the sub-object we
   are initializing.

   CTOR_P is non-zero if the caller of this function is a top-level
   constructor.  It is zero when called from a destructor.  When
   non-zero, we can use computed offsets to store the vtables.  When
   zero, we must store new vtables through virtual baseclass pointers.

   Returns error_mark_node when there is nothing to do.  */

tree
build_vbase_vtables_init (main_binfo, binfo, true_exp, decl_ptr, ctor_p)
     tree main_binfo, binfo;
     tree true_exp, decl_ptr;
     int ctor_p;
{
  tree for_type = BINFO_TYPE (main_binfo);
  tree type = BINFO_TYPE (binfo);
  if (TYPE_USES_VIRTUAL_BASECLASSES (type))
    {
      int old_flag = flag_this_is_variable;
      tree vtable_init_result = NULL_TREE;
      tree vbases = CLASSTYPE_VBASECLASSES (type);

      vbase_types = CLASSTYPE_VBASECLASSES (for_type);
      vbase_decl_ptr = true_exp ? build_unary_op (ADDR_EXPR, true_exp, 0) : decl_ptr;
      vbase_decl = true_exp ? true_exp : build_indirect_ref (decl_ptr, 0);

      if (ctor_p)
	{
	  /* In the constructor case, precompute the vbase addresses
	     into the search slots so they can be used below.  */
	  flag_this_is_variable = -2;
	  dfs_walk (main_binfo, dfs_find_vbases, unmarked_new_vtablep);
	}

      /* Initialized with vtables of type TYPE.  */
      while (vbases)
	{
	  /* This time through, not every class's vtable
	     is going to be initialized.  That is, we only initialize
	     the "last" vtable pointer.  */

	  if (CLASSTYPE_VSIZE (BINFO_TYPE (vbases)))
	    {
	      tree addr;
	      tree vtbl = BINFO_VTABLE (vbases);
	      tree init = build_unary_op (ADDR_EXPR, vtbl, 0);
	      assemble_external (vtbl);
	      TREE_USED (vtbl) = 1;

	      /* Destructor path must go through the vbase pointer;
		 constructor path can use the precomputed slot.  */
	      if (ctor_p == 0)
		addr = convert_pointer_to (vbases, vbase_decl_ptr);
	      else
		addr = (tree)CLASSTYPE_SEARCH_SLOT (BINFO_TYPE (vbases));

	      if (addr)
		{
		  tree ref = build_vfield_ref (build_indirect_ref (addr, 0),
					       BINFO_TYPE (vbases));
		  init = convert_force (TREE_TYPE (ref), init);
		  vtable_init_result = tree_cons (NULL_TREE, build_modify_expr (ref, NOP_EXPR, init),
						  vtable_init_result);
		}
	    }
	  vbases = TREE_CHAIN (vbases);
	}

      /* Clean up the slots and marks left by dfs_find_vbases.  */
      dfs_walk (binfo, dfs_clear_vbase_slots, marked_new_vtablep);

      flag_this_is_variable = old_flag;
      if (vtable_init_result)
	return build_compound_expr (vtable_init_result);
    }
  return error_mark_node;
}
3286 | ||
3287 | void | |
3288 | clear_search_slots (type) | |
3289 | tree type; | |
3290 | { | |
3291 | dfs_walk (TYPE_BINFO (type), | |
3292 | dfs_clear_search_slot, dfs_search_slot_nonempty_p); | |
3293 | } | |
3294 | ||
/* Worker for get_vbase_types: accumulate, onto the global vbase_types,
   a fresh binfo for every virtual base reachable from BINFO that has
   not been collected yet, then mark BINFO as visited.  */
static void
dfs_get_vbase_types (binfo)
     tree binfo;
{
  int i;
  tree binfos = BINFO_BASETYPES (binfo);
  tree type = BINFO_TYPE (binfo);
  tree these_vbase_types = CLASSTYPE_VBASECLASSES (type);

  if (these_vbase_types)
    {
      /* TYPE already has its vbase list computed; copy from it.  */
      while (these_vbase_types)
	{
	  tree this_type = BINFO_TYPE (these_vbase_types);

	  /* We really need to start from a fresh copy of this
	     virtual basetype!  CLASSTYPE_MARKED2 is the shortcut
	     for BINFO_VBASE_MARKED.  */
	  if (! CLASSTYPE_MARKED2 (this_type))
	    {
	      vbase_types = make_binfo (integer_zero_node,
					this_type,
					TYPE_BINFO_VTABLE (this_type),
					TYPE_BINFO_VIRTUALS (this_type),
					vbase_types);
	      TREE_VIA_VIRTUAL (vbase_types) = 1;
	      SET_CLASSTYPE_MARKED2 (this_type);
	    }
	  these_vbase_types = TREE_CHAIN (these_vbase_types);
	}
    }
  /* Otherwise scan the immediate bases for virtual ones, back to front.  */
  else for (i = binfos ? TREE_VEC_LENGTH (binfos)-1 : -1; i >= 0; i--)
    {
      tree base_binfo = TREE_VEC_ELT (binfos, i);
      if (TREE_VIA_VIRTUAL (base_binfo) && ! BINFO_VBASE_MARKED (base_binfo))
	{
	  vbase_types = make_binfo (integer_zero_node, BINFO_TYPE (base_binfo),
				    BINFO_VTABLE (base_binfo),
				    BINFO_VIRTUALS (base_binfo), vbase_types);
	  TREE_VIA_VIRTUAL (vbase_types) = 1;
	  SET_BINFO_VBASE_MARKED (base_binfo);
	}
    }
  SET_BINFO_MARKED (binfo);
}
3340 | ||
/* Some virtual baseclasses might be virtual baseclasses for
   other virtual baseclasses.  We sort the virtual baseclasses
   topologically: in the list returned, the first virtual base
   classes have no virtual baseclasses themselves, and any entry
   on the list has no dependency on virtual base classes later in the
   list.  */
tree
get_vbase_types (type)
     tree type;
{
  tree ordered_vbase_types = NULL_TREE, prev, next;
  tree vbases;

  /* Collect all virtual bases (into the global vbase_types), then
     clear the visit marks left behind.  */
  vbase_types = NULL_TREE;
  dfs_walk (TYPE_BINFO (type), dfs_get_vbase_types, unmarkedp);
  dfs_walk (TYPE_BINFO (type), dfs_unmark, markedp);

  while (vbase_types)
    {
      /* Now sort these types.  This is essentially a bubble merge.  */

      /* Farm out virtual baseclasses which have no marked ancestors.  */
      for (vbases = vbase_types, prev = NULL_TREE;
	   vbases; vbases = next)
	{
	  next = TREE_CHAIN (vbases);
	  /* If VBASES does not have any vbases itself, or it's
	     topologically safe, it goes into the sorted list.  */
	  if (! CLASSTYPE_VBASECLASSES (BINFO_TYPE (vbases))
	      || BINFO_VBASE_MARKED (vbases) == 0)
	    {
	      /* Unlink from the pending list and append to the result.  */
	      if (prev)
		TREE_CHAIN (prev) = TREE_CHAIN (vbases);
	      else
		vbase_types = TREE_CHAIN (vbases);
	      TREE_CHAIN (vbases) = NULL_TREE;
	      ordered_vbase_types = chainon (ordered_vbase_types, vbases);
	      CLEAR_BINFO_VBASE_MARKED (vbases);
	    }
	  else
	    prev = vbases;
	}

      /* Now unmark types all of whose ancestors are now on the
	 `ordered_vbase_types' list.  */
      for (vbases = vbase_types; vbases; vbases = TREE_CHAIN (vbases))
	{
	  /* If all our virtual baseclasses are unmarked, ok.  */
	  tree t = CLASSTYPE_VBASECLASSES (BINFO_TYPE (vbases));
	  while (t && (BINFO_VBASE_MARKED (t) == 0
		       || ! CLASSTYPE_VBASECLASSES (BINFO_TYPE (t))))
	    t = TREE_CHAIN (t);
	  if (t == NULL_TREE)
	    CLEAR_BINFO_VBASE_MARKED (vbases);
	}
    }

  return ordered_vbase_types;
}
3400 | \f | |
/* Worker for build_mi_matrix: record in mi_matrix that BINFO derives
   from each of its immediate bases, and propagate each base's existing
   derivation entries into BINFO's.  Runs bottom-up under dfs_walk, so
   base rows are already complete when we get here.  */
static void
dfs_record_inheritance (binfo)
     tree binfo;
{
  tree binfos = BINFO_BASETYPES (binfo);
  int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
  mi_boolean *derived_row = BINFO_DERIVES_FROM_STAR (binfo);

  for (i = n_baselinks-1; i >= 0; i--)
    {
      int j;
      tree base_binfo = TREE_VEC_ELT (binfos, i);
      tree baseclass = BINFO_TYPE (base_binfo);
      mi_boolean *base_row = BINFO_DERIVES_FROM_STAR (base_binfo);

      /* Don't search if there's nothing there!  MI_SIZE can be
	 zero as a result of parse errors.  */
      if (TYPE_BINFO_BASETYPES (baseclass) && mi_size > 0)
	/* OR the base's entries into ours; J steps by mi_size because
	   the *_STAR pointers are column-biased (see the macro).  */
	for (j = mi_size*(CLASSTYPE_CID (baseclass)-1); j >= 0; j -= mi_size)
	  derived_row[j] |= base_row[j];
      TYPE_DERIVES_FROM (baseclass, BINFO_TYPE (binfo)) = 1;
    }

  SET_BINFO_MARKED (binfo);
}
3426 | ||
/* Given a _CLASSTYPE node in a multiple inheritance lattice,
   convert the lattice into a simple relation such that,
   given two CIDs, C1 and C2, one can determine if C1 <= C2
   or C2 <= C1 or C1 <> C2.

   Once constructed, we walk the lattice depth first,
   applying various functions to elements as they are encountered.

   We use xmalloc here, in case we want to randomly free these tables.  */
3436 | ||
3437 | #define SAVE_MI_MATRIX | |
3438 | ||
/* Build (or revive from the per-type cache, under SAVE_MI_MATRIX) the
   derivation matrix for TYPE, and number the lattice members so the
   BINFO_DERIVES_FROM family of macros can index into it.  */
void
build_mi_matrix (type)
     tree type;
{
  tree binfo = TYPE_BINFO (type);
  cid = 0;

#ifdef SAVE_MI_MATRIX
  if (CLASSTYPE_MI_MATRIX (type))
    {
      /* Cached: just restore the globals and re-number the lattice.  */
      mi_size = CLASSTYPE_N_SUPERCLASSES (type) + CLASSTYPE_N_VBASECLASSES (type);
      mi_matrix = CLASSTYPE_MI_MATRIX (type);
      mi_type = type;
      dfs_walk (binfo, dfs_number, unnumberedp);
      return;
    }
#endif

  mi_size = CLASSTYPE_N_SUPERCLASSES (type) + CLASSTYPE_N_VBASECLASSES (type);
  /* NOTE(review): (mi_size+1)^2 bytes are allocated but only
     mi_size^2 are zeroed; the extra row/column appears to be slop —
     confirm before tightening either size.  */
  mi_matrix = (char *)xmalloc ((mi_size+1) * (mi_size+1));
  mi_type = type;
  bzero (mi_matrix, mi_size * mi_size);
  dfs_walk (binfo, dfs_number, unnumberedp);
  dfs_walk (binfo, dfs_record_inheritance, unmarkedp);
  dfs_walk (binfo, dfs_unmark, markedp);
}
3465 | ||
/* Tear down the matrix built by build_mi_matrix: un-number the lattice
   and either cache the matrix on the type (SAVE_MI_MATRIX) or free it.  */
void
free_mi_matrix ()
{
  dfs_walk (TYPE_BINFO (mi_type), dfs_unnumber, numberedp);

#ifdef SAVE_MI_MATRIX
  CLASSTYPE_MI_MATRIX (mi_type) = mi_matrix;
#else
  free (mi_matrix);
  mi_size = 0;
  cid = 0;
#endif
}
3479 | ||
/* Local variables for detecting ambiguities of virtual functions
   when two or more classes are joined at a multiple inheritance
   seam.  */
typedef struct
{
  tree decl;	/* The FUNCTION_DECL of this virtual function.  */
  tree args;	/* Its argument-type chain (see FUNCTION_ARG_CHAIN).  */
  tree ptr;	/* The vtable entry this table slot was filled from.  */
} mi_ventry;
/* mi_vrows x mi_vcols table of entries, one row per base class.
   The pointer is biased by one row in build_mi_virtuals so row
   indices start at 1.  */
static mi_ventry *mi_vmatrix;
/* Per-row occupancy: one past the last used column; biased by 1.  */
static int *mi_vmax;
static int mi_vrows, mi_vcols;
#define MI_VMATRIX(ROW,COL) ((mi_vmatrix + (ROW)*mi_vcols)[COL])
3493 | ||
3494 | /* Build a table of virtual functions for a multiple-inheritance | |
3495 | structure. Here, there are N base classes, and at most | |
3496 | M entries per class. | |
3497 | ||
3498 | This function does nothing if N is 0 or 1. */ | |
3499 | void | |
3500 | build_mi_virtuals (rows, cols) | |
3501 | int rows, cols; | |
3502 | { | |
3503 | if (rows < 2 || cols == 0) | |
3504 | return; | |
3505 | mi_vrows = rows; | |
3506 | mi_vcols = cols; | |
3507 | mi_vmatrix = (mi_ventry *)xmalloc ((rows+1) * cols * sizeof (mi_ventry)); | |
3508 | mi_vmax = (int *)xmalloc ((rows+1) * sizeof (int)); | |
3509 | ||
3510 | bzero (mi_vmax, rows * sizeof (int)); | |
3511 | ||
3512 | /* Row indices start at 1, so adjust this. */ | |
3513 | mi_vmatrix -= cols; | |
3514 | mi_vmax -= 1; | |
3515 | } | |
3516 | ||
3517 | /* Comparison function for ordering virtual function table entries. */ | |
3518 | static int | |
3519 | rank_mi_virtuals (v1, v2) | |
3520 | mi_ventry *v1, *v2; | |
3521 | { | |
3522 | tree p1, p2; | |
3523 | int i; | |
3524 | ||
3525 | i = (long) (DECL_NAME (v1->decl)) - (long) (DECL_NAME (v2->decl)); | |
3526 | if (i) | |
3527 | return i; | |
3528 | p1 = v1->args; | |
3529 | p2 = v2->args; | |
3530 | ||
3531 | if (p1 == p2) | |
3532 | return 0; | |
3533 | ||
3534 | while (p1 && p2) | |
3535 | { | |
3536 | i = ((long) (TREE_VALUE (p1)) - (long) (TREE_VALUE (p2))); | |
3537 | if (i) | |
3538 | return i; | |
3539 | ||
3540 | if (TREE_CHAIN (p1)) | |
3541 | { | |
3542 | if (! TREE_CHAIN (p2)) | |
3543 | return 1; | |
3544 | p1 = TREE_CHAIN (p1); | |
3545 | p2 = TREE_CHAIN (p2); | |
3546 | } | |
3547 | else if (TREE_CHAIN (p2)) | |
3548 | return -1; | |
3549 | else | |
3550 | { | |
3551 | /* When matches of argument lists occur, pick lowest | |
3552 | address to keep searching time to a minimum on | |
3553 | later passes--like hashing, only different. | |
3554 | *MUST BE STABLE*. */ | |
3555 | if ((long) (v2->args) < (long) (v1->args)) | |
3556 | v1->args = v2->args; | |
3557 | else | |
3558 | v2->args = v1->args; | |
3559 | return 0; | |
3560 | } | |
3561 | } | |
3562 | return 0; | |
3563 | } | |
3564 | ||
3565 | /* Install the virtuals functions got from the initializer VIRTUALS to | |
3566 | the table at index ROW. */ | |
3567 | void | |
3568 | add_mi_virtuals (row, virtuals) | |
3569 | int row; | |
3570 | tree virtuals; | |
3571 | { | |
3572 | int col = 0; | |
3573 | ||
3574 | if (mi_vmatrix == 0) | |
3575 | return; | |
3576 | while (virtuals) | |
3577 | { | |
3578 | tree decl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0); | |
3579 | MI_VMATRIX (row, col).decl = decl; | |
3580 | MI_VMATRIX (row, col).args = FUNCTION_ARG_CHAIN (decl); | |
3581 | MI_VMATRIX (row, col).ptr = TREE_VALUE (virtuals); | |
3582 | virtuals = TREE_CHAIN (virtuals); | |
3583 | col += 1; | |
3584 | } | |
3585 | mi_vmax[row] = col; | |
3586 | ||
3587 | qsort (mi_vmatrix + row * mi_vcols, | |
3588 | col, | |
3589 | sizeof (mi_ventry), | |
3590 | rank_mi_virtuals); | |
3591 | } | |
3592 | ||
3593 | /* If joining two types results in an ambiguity in the virtual | |
3594 | function table, report such here. */ | |
/* If joining two types results in an ambiguity in the virtual
   function table, report such here.  ROWS is the number of rows that
   add_mi_virtuals filled; TYPE is the derived class being built.
   Frees and clears mi_vmatrix / mi_vmax on exit (undoing the pointer
   biasing from build_mi_virtuals before calling free).  */
void
report_ambiguous_mi_virtuals (rows, type)
     int rows;
     tree type;
{
  int *mi_vmin;
  int row1, col1, row, col;

  /* build_mi_virtuals allocated nothing, so there is nothing to merge.  */
  if (mi_vmatrix == 0)
    return;

  /* Now virtuals are all sorted, so we merge to find ambiguous cases. */
  mi_vmin = (int *)alloca ((rows+1) * sizeof (int));
  bzero (mi_vmin, rows * sizeof (int));

  /* adjust.  Like mi_vmax, mi_vmin is biased so rows index from 1. */
  mi_vmin -= 1;

  /* For each base class with virtual functions (and this includes views
     of the virtual baseclasses from different base classes), see that
     each virtual function in that base class has a unique meet.

     When the column loop is finished, THIS_DECL is in fact the meet.
     If that value does not appear in the virtual function table for
     the row, install it.  This happens when that virtual function comes
     from a virtual baseclass, or a non-leftmost baseclass.  */

  for (row1 = 1; row1 < rows; row1++)
    {
      tree this_decl = 0;

      /* Walk the occupied slots of row ROW1 from the top down.  */
      for (col1 = mi_vmax[row1]-1; col1 >= mi_vmin[row1]; col1--)
	{
	  tree these_args = MI_VMATRIX (row1, col1).args;
	  tree this_context;

	  this_decl = MI_VMATRIX (row1, col1).decl;
	  if (this_decl == 0)
	    /* Already cancelled against an earlier row.  */
	    continue;
	  this_context = TYPE_BINFO (DECL_CLASS_CONTEXT (this_decl));

	  if (this_context != TYPE_BINFO (type))
	    this_context = get_binfo (this_context, type, 0);

	  /* Compare against every later row's surviving entries.  */
	  for (row = row1+1; row <= rows; row++)
	    for (col = mi_vmax[row]-1; col >= mi_vmin[row]; col--)
	      {
		mi_ventry this_entry;

		if (MI_VMATRIX (row, col).decl == 0)
		  continue;

		this_entry.decl = this_decl;
		this_entry.args = these_args;
		this_entry.ptr = MI_VMATRIX (row1, col1).ptr;
		if (rank_mi_virtuals (&this_entry, &MI_VMATRIX (row, col)) == 0)
		  {
		    /* They are equal.  There are four possibilities:

		       (1) Derived class is defining this virtual function.
		       (2) Two paths to the same virtual function in the
		       same base class.
		       (3) A path to a virtual function declared in one base
		       class, and another path to a virtual function in a
		       base class of the base class.
		       (4) Two paths to the same virtual function in different
		       base classes.

		       The first three cases are ok (non-ambiguous).  */

		    tree that_context, tmp;
		    int this_before_that;

		    if (type == BINFO_TYPE (this_context))
		      /* case 1. */
		      goto ok;
		    that_context = get_binfo (DECL_CLASS_CONTEXT (MI_VMATRIX (row, col).decl), type, 0);
		    if (that_context == this_context)
		      /* case 2. */
		      goto ok;
		    if (that_context != NULL_TREE)
		      {
			tmp = get_binfo (that_context, this_context, 0);
			this_before_that = (that_context != tmp);
			if (this_before_that == 0)
			  /* case 3a. */
			  goto ok;
			tmp = get_binfo (this_context, that_context, 0);
			this_before_that = (this_context == tmp);
			if (this_before_that != 0)
			  /* case 3b. */
			  goto ok;

			/* case 4. */
			/* These two are not hard errors, but could be
			   symptoms of bad code.  The resultant code
			   the compiler generates needs to be checked.
			   (mrs) */
#if 0
			error_with_decl (MI_VMATRIX (row, col).decl, "ambiguous virtual function `%s'");
			error_with_decl (this_decl, "ambiguating function `%s' (joined by type `%s')", IDENTIFIER_POINTER (current_class_name));
#endif
		      }
		  ok:
		    /* Cancel the duplicate so it is not reported again.  */
		    MI_VMATRIX (row, col).decl = 0;

		    /* Let zeros propagate: shrink the occupied window
		       [mi_vmin, mi_vmax) of this row from whichever end
		       the cancelled entry sat on.  */
		    if (col == mi_vmax[row]-1)
		      {
			int i = col;
			while (i >= mi_vmin[row]
			       && MI_VMATRIX (row, i).decl == 0)
			  i--;
			mi_vmax[row] = i+1;
		      }
		    else if (col == mi_vmin[row])
		      {
			int i = col;
			while (i < mi_vmax[row]
			       && MI_VMATRIX (row, i).decl == 0)
			  i++;
			mi_vmin[row] = i;
		      }
		  }
	      }
	}
    }
  /* Undo the row biasing applied in build_mi_virtuals before freeing.  */
  free (mi_vmatrix + mi_vcols);
  mi_vmatrix = 0;
  free (mi_vmax + 1);
  mi_vmax = 0;
}
3727 | \f | |
3728 | /* If we want debug info for a type TYPE, make sure all its base types | |
3729 | are also marked as being potentially interesting. This avoids | |
3730 | the problem of not writing any debug info for intermediate basetypes | |
3731 | that have abstract virtual functions. */ | |
3732 | ||
3733 | void | |
3734 | note_debug_info_needed (type) | |
3735 | tree type; | |
3736 | { | |
3737 | dfs_walk (TYPE_BINFO (type), dfs_debug_mark, dfs_debug_unmarkedp); | |
3738 | } | |
3739 | \f | |
3740 | /* Subroutines of push_class_decls (). */ | |
3741 | ||
3742 | /* Add the instance variables which this class contributed to the | |
3743 | current class binding contour. When a redefinition occurs, | |
3744 | if the redefinition is strictly within a single inheritance path, | |
3745 | we just overwrite (in the case of a data field) or | |
3746 | cons (in the case of a member function) the old declaration with | |
3747 | the new. If the fields are not within a single inheritance path, | |
3748 | we must cons them in either case. */ | |
3749 | ||
/* Push one binfo's fields and member functions into the
   IDENTIFIER_CLASS_VALUE bindings for the class scope being entered,
   building ambiguity lists where a name is reachable from more than
   one unrelated base.  Marks BINFO so dfs_walk visits each binfo once.  */
static void
dfs_pushdecls (binfo)
     tree binfo;
{
  tree type = BINFO_TYPE (binfo);
  tree fields, *methods, *end;
  tree method_vec;

  for (fields = TYPE_FIELDS (type); fields; fields = TREE_CHAIN (fields))
    {
      /* Unmark so that if we are in a constructor, and then find that
	 this field was initialized by a base initializer,
	 we can emit an error message.  */
      if (TREE_CODE (fields) == FIELD_DECL)
	TREE_USED (fields) = 0;

      /* Anonymous union: its members belong directly to this scope,
	 so recurse into it.  */
      if (DECL_NAME (fields) == NULL_TREE
	  && TREE_CODE (TREE_TYPE (fields)) == UNION_TYPE)
	{
	  dfs_pushdecls (TYPE_BINFO (TREE_TYPE (fields)));
	  continue;
	}
      /* Reset the access flags; presumably recomputed later relative
	 to the class whose scope is being entered -- TODO(review):
	 confirm where they are recomputed.  */
      if (TREE_CODE (fields) != TYPE_DECL)
	{
	  DECL_PUBLIC (fields) = 0;
	  DECL_PROTECTED (fields) = 0;
	  DECL_PRIVATE (fields) = 0;
	}

      if (DECL_NAME (fields))
	{
	  tree value = IDENTIFIER_CLASS_VALUE (DECL_NAME (fields));
	  if (value)
	    {
	      tree context;

	      /* Possible ambiguity.  If its defining type(s)
		 is (are all) derived from us, no problem.  */

	      if (TREE_CODE (value) != TREE_LIST)
		{
		  context = DECL_CLASS_CONTEXT (value);

		  if (context && (context == type
				  || TYPE_DERIVES_FROM (context, type)))
		    /* The previously bound decl comes from a class
		       derived from us, so this field hides it.  */
		    value = fields;
		  else
		    /* Keep both candidates; lookup must see the
		       ambiguity.  */
		    value = tree_cons (NULL_TREE, fields,
				       build_tree_list (NULL_TREE, value));
		}
	      else
		{
		  /* All children may derive from us, in which case
		     there is no problem.  Otherwise, we have to
		     keep lists around of what the ambiguities might be.  */
		  tree values;
		  int problem = 0;

		  for (values = value; values; values = TREE_CHAIN (values))
		    {
		      tree sub_values = TREE_VALUE (values);

		      if (TREE_CODE (sub_values) == TREE_LIST)
			{
			  for (; sub_values; sub_values = TREE_CHAIN (sub_values))
			    {
			      context = DECL_CLASS_CONTEXT (TREE_VALUE (sub_values));

			      if (! TYPE_DERIVES_FROM (context, type))
				{
				  value = tree_cons (NULL_TREE, TREE_VALUE (values), value);
				  problem = 1;
				  break;
				}
			    }
			}
		      else
			{
			  context = DECL_CLASS_CONTEXT (sub_values);

			  if (! TYPE_DERIVES_FROM (context, type))
			    {
			      /* NOTE(review): this conses VALUES (the list
				 cell) while the branch above conses
				 TREE_VALUE (values) (the decl) -- verify
				 the asymmetry is intended.  */
			      value = tree_cons (NULL_TREE, values, value);
			      problem = 1;
			      break;
			    }
			}
		    }
		  if (! problem) value = fields;
		}

	      /* Mark this as a potentially ambiguous member.  */
	      if (TREE_CODE (value) == TREE_LIST)
		{
		  /* Leaving TREE_TYPE blank is intentional.
		     We cannot use `error_mark_node' (lookup_name)
		     or `unknown_type_node' (all member functions use this).  */
		  TREE_NONLOCAL_FLAG (value) = 1;
		}

	      IDENTIFIER_CLASS_VALUE (DECL_NAME (fields)) = value;
	    }
	  else IDENTIFIER_CLASS_VALUE (DECL_NAME (fields)) = fields;
	}
    }

  method_vec = CLASSTYPE_METHOD_VEC (type);
  if (method_vec != 0)
    {
      /* Farm out constructors and destructors.  */
      methods = &TREE_VEC_ELT (method_vec, 1);
      end = TREE_VEC_END (method_vec);

      /* This does not work for multiple inheritance yet.  */
      while (methods != end)
	{
	  /* This will cause lookup_name to return a pointer
	     to the tree_list of possible methods of this name.
	     If the order is a problem, we can nreverse them.  */
	  tree tmp;
	  tree old = IDENTIFIER_CLASS_VALUE (DECL_NAME (*methods));

	  if (old && TREE_CODE (old) == TREE_LIST)
	    tmp = tree_cons (DECL_NAME (*methods), *methods, old);
	  else
	    {
	      /* Only complain if we shadow something we can access.  */
	      if (old && (DECL_CLASS_CONTEXT (old) == current_class_type
			  || ! TREE_PRIVATE (old)))
		/* Should figure out visibility more accurately.  */
		warning ("shadowing member `%s' with member function",
			 IDENTIFIER_POINTER (DECL_NAME (*methods)));
	      tmp = build_tree_list (DECL_NAME (*methods), *methods);
	    }

	  TREE_TYPE (tmp) = unknown_type_node;
#if 0
	  TREE_OVERLOADED (tmp) = DECL_OVERLOADED (*methods);
#endif
	  TREE_NONLOCAL_FLAG (tmp) = 1;
	  IDENTIFIER_CLASS_VALUE (DECL_NAME (*methods)) = tmp;

	  /* Reset access flags along the whole overload chain, as
	     done for fields above.  */
	  tmp = *methods;
	  while (tmp != 0)
	    {
	      DECL_PUBLIC (tmp) = 0;
	      DECL_PROTECTED (tmp) = 0;
	      DECL_PRIVATE (tmp) = 0;
	      tmp = DECL_CHAIN (tmp);
	    }

	  methods++;
	}
    }
  SET_BINFO_MARKED (binfo);
}
3906 | ||
3907 | /* Consolidate unique (by name) member functions. */ | |
3908 | static void | |
3909 | dfs_compress_decls (binfo) | |
3910 | tree binfo; | |
3911 | { | |
3912 | tree type = BINFO_TYPE (binfo); | |
3913 | tree method_vec = CLASSTYPE_METHOD_VEC (type); | |
3914 | ||
3915 | if (method_vec != 0) | |
3916 | { | |
3917 | /* Farm out constructors and destructors. */ | |
3918 | tree *methods = &TREE_VEC_ELT (method_vec, 1); | |
3919 | tree *end = TREE_VEC_END (method_vec); | |
3920 | ||
3921 | for (; methods != end; methods++) | |
3922 | { | |
3923 | tree tmp = IDENTIFIER_CLASS_VALUE (DECL_NAME (*methods)); | |
3924 | ||
3925 | /* This was replaced in scope by somebody else. Just leave it | |
3926 | alone. */ | |
3927 | if (TREE_CODE (tmp) != TREE_LIST) | |
3928 | continue; | |
3929 | ||
3930 | if (TREE_CHAIN (tmp) == NULL_TREE | |
3931 | && TREE_VALUE (tmp) | |
3932 | && DECL_CHAIN (TREE_VALUE (tmp)) == NULL_TREE) | |
3933 | { | |
3934 | IDENTIFIER_CLASS_VALUE (DECL_NAME (*methods)) = TREE_VALUE (tmp); | |
3935 | } | |
3936 | } | |
3937 | } | |
3938 | CLEAR_BINFO_MARKED (binfo); | |
3939 | } | |
3940 | ||
3941 | /* When entering the scope of a class, we cache all of the | |
3942 | fields that that class provides within its inheritance | |
3943 | lattice. Where ambiguities result, we mark them | |
3944 | with `error_mark_node' so that if they are encountered | |
3945 | without explicit qualification, we can emit an error | |
3946 | message. */ | |
/* Enter the binding contour of TYPE: walk its inheritance lattice and
   cache every reachable field/method in IDENTIFIER_CLASS_VALUE (see
   dfs_pushdecls / dfs_compress_decls).  Allocations for the search
   level go on bridge_obstack so they survive function-context
   switches; the ambient obstack is restored before returning.  */
void
push_class_decls (type)
     tree type;
{
  tree id;
  struct obstack *ambient_obstack = current_obstack;

#if 0
  /* Dead code (original author's "#if 0"): would also push the
     class's nested tags.  */
  tree tags = CLASSTYPE_TAGS (type);

  while (tags)
    {
      tree code_type_node;
      tree tag;

      switch (TREE_CODE (TREE_VALUE (tags)))
	{
	case ENUMERAL_TYPE:
	  code_type_node = enum_type_node;
	  break;
	case RECORD_TYPE:
	  code_type_node = record_type_node;
	  break;
	case CLASS_TYPE:
	  code_type_node = class_type_node;
	  break;
	case UNION_TYPE:
	  code_type_node = union_type_node;
	  break;
	default:
	  my_friendly_abort (297);
	}
      tag = xref_tag (code_type_node, TREE_PURPOSE (tags),
		      TYPE_BINFO_BASETYPE (TREE_VALUE (tags), 0));
#if 0 /* not yet, should get fixed properly later */
      pushdecl (make_type_decl (TREE_PURPOSE (tags), TREE_VALUE (tags)));
#else
      pushdecl (build_decl (TYPE_DECL, TREE_PURPOSE (tags), TREE_VALUE (tags)));
#endif
    }
#endif

  current_obstack = &bridge_obstack;
  search_stack = push_search_level (search_stack, &bridge_obstack);

  /* For a template class, overload its name as well.  */
  id = TYPE_IDENTIFIER (type);
  if (IDENTIFIER_TEMPLATE (id) != 0)
    {
#if 0
      tree tmpl = IDENTIFIER_TEMPLATE (id);
      push_template_decls (DECL_ARGUMENTS (TREE_PURPOSE (tmpl)),
			   TREE_VALUE (tmpl), 1);
#endif
      overload_template_name (id, 0);
    }

  /* Push class fields into CLASS_VALUE scope, and mark.  */
  dfs_walk (TYPE_BINFO (type), dfs_pushdecls, unmarkedp);

  /* Compress fields which have only a single entry
     by a given name, and unmark.  */
  dfs_walk (TYPE_BINFO (type), dfs_compress_decls, markedp);
  current_obstack = ambient_obstack;
}
4011 | ||
4012 | static void | |
4013 | dfs_popdecls (binfo) | |
4014 | tree binfo; | |
4015 | { | |
4016 | tree type = BINFO_TYPE (binfo); | |
4017 | tree fields = TYPE_FIELDS (type); | |
4018 | tree method_vec = CLASSTYPE_METHOD_VEC (type); | |
4019 | ||
4020 | while (fields) | |
4021 | { | |
4022 | if (DECL_NAME (fields) == NULL_TREE | |
4023 | && TREE_CODE (TREE_TYPE (fields)) == UNION_TYPE) | |
4024 | { | |
4025 | dfs_popdecls (TYPE_BINFO (TREE_TYPE (fields))); | |
4026 | } | |
4027 | else if (DECL_NAME (fields)) | |
4028 | IDENTIFIER_CLASS_VALUE (DECL_NAME (fields)) = NULL_TREE; | |
4029 | fields = TREE_CHAIN (fields); | |
4030 | } | |
4031 | if (method_vec != 0) | |
4032 | { | |
4033 | tree *methods = &TREE_VEC_ELT (method_vec, 0); | |
4034 | tree *end = TREE_VEC_END (method_vec); | |
4035 | ||
4036 | /* Clear out ctors and dtors. */ | |
4037 | if (*methods) | |
4038 | IDENTIFIER_CLASS_VALUE (TYPE_IDENTIFIER (type)) = NULL_TREE; | |
4039 | ||
4040 | for (methods += 1; methods != end; methods++) | |
4041 | IDENTIFIER_CLASS_VALUE (DECL_NAME (*methods)) = NULL_TREE; | |
4042 | } | |
4043 | ||
4044 | SET_BINFO_MARKED (binfo); | |
4045 | } | |
4046 | ||
4047 | void | |
4048 | pop_class_decls (type) | |
4049 | tree type; | |
4050 | { | |
4051 | tree binfo = TYPE_BINFO (type); | |
4052 | ||
4053 | /* Clear out the IDENTIFIER_CLASS_VALUE which this | |
4054 | class may have occupied, and mark. */ | |
4055 | dfs_walk (binfo, dfs_popdecls, unmarkedp); | |
4056 | ||
4057 | /* Unmark. */ | |
4058 | dfs_walk (binfo, dfs_unmark, markedp); | |
4059 | ||
4060 | #if 0 | |
4061 | tmpl = IDENTIFIER_TEMPLATE (TYPE_IDENTIFIER (type)); | |
4062 | if (tmpl != 0) | |
4063 | pop_template_decls (DECL_ARGUMENTS (TREE_PURPOSE (tmpl)), | |
4064 | TREE_VALUE (tmpl), 1); | |
4065 | #endif | |
4066 | ||
4067 | search_stack = pop_search_level (search_stack); | |
4068 | } | |
4069 | ||
4070 | /* Given a base type PARENT, and a derived type TYPE, build | |
4071 | a name which distinguishes exactly the PARENT member of TYPE's type. | |
4072 | ||
4073 | FORMAT is a string which controls how sprintf formats the name | |
4074 | we have generated. | |
4075 | ||
4076 | For example, given | |
4077 | ||
4078 | class A; class B; class C : A, B; | |
4079 | ||
4080 | it is possible to distinguish "A" from "C's A". And given | |
4081 | ||
4082 | class L; | |
4083 | class A : L; class B : L; class C : A, B; | |
4084 | ||
4085 | it is possible to distinguish "L" from "A's L", and also from | |
4086 | "C's L from A". | |
4087 | ||
4088 | Make sure to use the DECL_ASSEMBLER_NAME of the TYPE_NAME of the | |
4089 | type, as template have DECL_NAMEs like: X<int>, whereas the | |
4090 | DECL_ASSEMBLER_NAME is set to be something the assembler can handle. | |
4091 | */ | |
4092 | tree | |
4093 | build_type_pathname (format, parent, type) | |
4094 | char *format; | |
4095 | tree parent, type; | |
4096 | { | |
4097 | extern struct obstack temporary_obstack; | |
4098 | char *first, *base, *name; | |
4099 | int i; | |
4100 | tree id; | |
4101 | ||
4102 | parent = TYPE_MAIN_VARIANT (parent); | |
4103 | ||
4104 | /* Remember where to cut the obstack to. */ | |
4105 | first = obstack_base (&temporary_obstack); | |
4106 | ||
4107 | /* Put on TYPE+PARENT. */ | |
4108 | obstack_grow (&temporary_obstack, | |
4109 | TYPE_ASSEMBLER_NAME_STRING (type), | |
4110 | TYPE_ASSEMBLER_NAME_LENGTH (type)); | |
4111 | #ifdef JOINER | |
4112 | obstack_1grow (&temporary_obstack, JOINER); | |
4113 | #else | |
4114 | obstack_1grow (&temporary_obstack, '_'); | |
4115 | #endif | |
4116 | obstack_grow0 (&temporary_obstack, | |
4117 | TYPE_ASSEMBLER_NAME_STRING (parent), | |
4118 | TYPE_ASSEMBLER_NAME_LENGTH (parent)); | |
4119 | i = obstack_object_size (&temporary_obstack); | |
4120 | base = obstack_base (&temporary_obstack); | |
4121 | obstack_finish (&temporary_obstack); | |
4122 | ||
4123 | /* Put on FORMAT+TYPE+PARENT. */ | |
4124 | obstack_blank (&temporary_obstack, strlen (format) + i + 1); | |
4125 | name = obstack_base (&temporary_obstack); | |
4126 | sprintf (name, format, base); | |
4127 | id = get_identifier (name); | |
4128 | obstack_free (&temporary_obstack, first); | |
4129 | ||
4130 | return id; | |
4131 | } | |
4132 | ||
/* Finish and unmark one binfo's vtable decl.  Called directly for the
   main binfo (with I == -1) and via breadth_first_search for bases,
   where BINFO is the enclosing binfo and I selects the base to
   process.  Always returns 0 so the breadth-first search continues.  */
static int
bfs_unmark_finished_struct (binfo, i)
     tree binfo;
     int i;
{
  if (i >= 0)
    binfo = BINFO_BASETYPE (binfo, i);

  if (BINFO_NEW_VTABLE_MARKED (binfo))
    {
      tree decl, context;

      /* For a virtual base, operate on the canonical binfo recorded
	 on the current class rather than this appearance of it.  */
      if (TREE_VIA_VIRTUAL (binfo))
	binfo = binfo_member (BINFO_TYPE (binfo),
			      CLASSTYPE_VBASECLASSES (current_class_type));

      decl = BINFO_VTABLE (binfo);
      context = DECL_CONTEXT (decl);
      /* DECL_CONTEXT is cleared around finish_decl and restored after;
	 presumably finish_decl treats class-scope decls specially --
	 TODO(review): confirm why the temporary clearing is needed.  */
      DECL_CONTEXT (decl) = 0;
      if (write_virtuals >= 0
	  && DECL_INITIAL (decl) != BINFO_VIRTUALS (binfo))
	DECL_INITIAL (decl) = build_nt (CONSTRUCTOR, NULL_TREE,
					BINFO_VIRTUALS (binfo));
      finish_decl (decl, DECL_INITIAL (decl), NULL_TREE, 0);
      DECL_CONTEXT (decl) = context;
    }
  CLEAR_BINFO_VTABLE_PATH_MARKED (binfo);
  CLEAR_BINFO_NEW_VTABLE_MARKED (binfo);
  return 0;
}
4163 | ||
4164 | void | |
4165 | unmark_finished_struct (type) | |
4166 | tree type; | |
4167 | { | |
4168 | tree binfo = TYPE_BINFO (type); | |
4169 | bfs_unmark_finished_struct (binfo, -1); | |
4170 | breadth_first_search (binfo, bfs_unmark_finished_struct, bfs_marked_vtable_pathp); | |
4171 | } | |
4172 | ||
/* Dump field/method lookup statistics to stderr.  Only meaningful
   when the compiler was configured with GATHER_STATISTICS; otherwise
   prints a placeholder line.  */
void
print_search_statistics ()
{
#ifdef GATHER_STATISTICS
  if (flag_memoize_lookups)
    {
      fprintf (stderr, "%d memoized contexts saved\n",
	       n_contexts_saved);
      fprintf (stderr, "%d local tree nodes made\n", my_tree_node_counter);
      fprintf (stderr, "%d local hash nodes made\n", my_memoized_entry_counter);
      fprintf (stderr, "fields statistics:\n");
      fprintf (stderr, "  memoized finds = %d; rejects = %d; (searches = %d)\n",
	       memoized_fast_finds[0], memoized_fast_rejects[0],
	       memoized_fields_searched[0]);
      fprintf (stderr, "  memoized_adds = %d\n", memoized_adds[0]);
      fprintf (stderr, "fnfields statistics:\n");
      fprintf (stderr, "  memoized finds = %d; rejects = %d; (searches = %d)\n",
	       memoized_fast_finds[1], memoized_fast_rejects[1],
	       memoized_fields_searched[1]);
      fprintf (stderr, "  memoized_adds = %d\n", memoized_adds[1]);
    }
  fprintf (stderr, "%d fields searched in %d[%d] calls to lookup_field[_1]\n",
	   n_fields_searched, n_calls_lookup_field, n_calls_lookup_field_1);
  fprintf (stderr, "%d fnfields searched in %d calls to lookup_fnfields\n",
	   n_outer_fields_searched, n_calls_lookup_fnfields);
  fprintf (stderr, "%d calls to get_base_type\n", n_calls_get_base_type);
#else
  fprintf (stderr, "no search statistics\n");
#endif
}
4203 | ||
4204 | void | |
4205 | init_search_processing () | |
4206 | { | |
4207 | gcc_obstack_init (&search_obstack); | |
4208 | gcc_obstack_init (&type_obstack); | |
4209 | gcc_obstack_init (&type_obstack_entries); | |
4210 | gcc_obstack_init (&bridge_obstack); | |
4211 | ||
4212 | /* This gives us room to build our chains of basetypes, | |
4213 | whether or not we decide to memoize them. */ | |
4214 | type_stack = push_type_level (0, &type_obstack); | |
4215 | _vptr_name = get_identifier ("_vptr"); | |
4216 | } | |
4217 | ||
4218 | void | |
4219 | reinit_search_statistics () | |
4220 | { | |
4221 | my_memoized_entry_counter = 0; | |
4222 | memoized_fast_finds[0] = 0; | |
4223 | memoized_fast_finds[1] = 0; | |
4224 | memoized_adds[0] = 0; | |
4225 | memoized_adds[1] = 0; | |
4226 | memoized_fast_rejects[0] = 0; | |
4227 | memoized_fast_rejects[1] = 0; | |
4228 | memoized_fields_searched[0] = 0; | |
4229 | memoized_fields_searched[1] = 0; | |
4230 | n_fields_searched = 0; | |
4231 | n_calls_lookup_field = 0, n_calls_lookup_field_1 = 0; | |
4232 | n_calls_lookup_fnfields = 0, n_calls_lookup_fnfields_1 = 0; | |
4233 | n_calls_get_base_type = 0; | |
4234 | n_outer_fields_searched = 0; | |
4235 | n_contexts_saved = 0; | |
4236 | } |