| 1 | /* Language-dependent node constructors for parse phase of GNU compiler. |
| 2 | Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, |
| 3 | 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc. |
| 4 | Hacked by Michael Tiemann (tiemann@cygnus.com) |
| 5 | |
| 6 | This file is part of GCC. |
| 7 | |
| 8 | GCC is free software; you can redistribute it and/or modify |
| 9 | it under the terms of the GNU General Public License as published by |
| 10 | the Free Software Foundation; either version 2, or (at your option) |
| 11 | any later version. |
| 12 | |
| 13 | GCC is distributed in the hope that it will be useful, |
| 14 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 15 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 16 | GNU General Public License for more details. |
| 17 | |
| 18 | You should have received a copy of the GNU General Public License |
| 19 | along with GCC; see the file COPYING. If not, write to |
| 20 | the Free Software Foundation, 59 Temple Place - Suite 330, |
| 21 | Boston, MA 02111-1307, USA. */ |
| 22 | |
| 23 | #include "config.h" |
| 24 | #include "system.h" |
| 25 | #include "coretypes.h" |
| 26 | #include "tm.h" |
| 27 | #include "tree.h" |
| 28 | #include "cp-tree.h" |
| 29 | #include "flags.h" |
| 30 | #include "real.h" |
| 31 | #include "rtl.h" |
| 32 | #include "toplev.h" |
| 33 | #include "insn-config.h" |
| 34 | #include "integrate.h" |
| 35 | #include "tree-inline.h" |
| 36 | #include "target.h" |
| 37 | |
| 38 | static tree bot_manip (tree *, int *, void *); |
| 39 | static tree bot_replace (tree *, int *, void *); |
| 40 | static tree build_cplus_array_type_1 (tree, tree); |
| 41 | static int list_hash_eq (const void *, const void *); |
| 42 | static hashval_t list_hash_pieces (tree, tree, tree); |
| 43 | static hashval_t list_hash (const void *); |
| 44 | static cp_lvalue_kind lvalue_p_1 (tree, int); |
| 45 | static tree no_linkage_helper (tree *, int *, void *); |
| 46 | static tree mark_local_for_remap_r (tree *, int *, void *); |
| 47 | static tree cp_unsave_r (tree *, int *, void *); |
| 48 | static tree build_target_expr (tree, tree); |
| 49 | static tree count_trees_r (tree *, int *, void *); |
| 50 | static tree verify_stmt_tree_r (tree *, int *, void *); |
| 51 | static tree find_tree_r (tree *, int *, void *); |
| 52 | static tree build_local_temp (tree); |
| 53 | |
| 54 | static tree handle_java_interface_attribute (tree *, tree, tree, int, bool *); |
| 55 | static tree handle_com_interface_attribute (tree *, tree, tree, int, bool *); |
| 56 | static tree handle_init_priority_attribute (tree *, tree, tree, int, bool *); |
| 57 | |
| 58 | /* If REF is an lvalue, returns the kind of lvalue that REF is. |
| 59 | Otherwise, returns clk_none. If TREAT_CLASS_RVALUES_AS_LVALUES is |
| 60 | nonzero, rvalues of class type are considered lvalues. */ |
| 61 | |
static cp_lvalue_kind
lvalue_p_1 (tree ref,
	    int treat_class_rvalues_as_lvalues)
{
  cp_lvalue_kind op1_lvalue_kind = clk_none;
  cp_lvalue_kind op2_lvalue_kind = clk_none;

  /* Anything of reference type is an lvalue.  */
  if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
    return clk_ordinary;

  /* `this' is never an lvalue.  */
  if (ref == current_class_ptr)
    return clk_none;

  switch (TREE_CODE (ref))
    {
      /* preincrements and predecrements are valid lvals, provided
	 what they refer to are valid lvals.  */
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case UNSAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* These wrappers are transparent; lvalue-ness is determined by
	 operand 0 alone.  */
      return lvalue_p_1 (TREE_OPERAND (ref, 0),
			 treat_class_rvalues_as_lvalues);

    case COMPONENT_REF:
      op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
				    treat_class_rvalues_as_lvalues);
      /* In an expression of the form "X.Y", the packed-ness of the
	 expression does not depend on "X".  */
      op1_lvalue_kind &= ~clk_packed;
      /* Look at the member designator.  */
      if (!op1_lvalue_kind
	  /* The "field" can be a FUNCTION_DECL or an OVERLOAD in some
	     situations.  */
	  || TREE_CODE (TREE_OPERAND (ref, 1)) != FIELD_DECL)
	;
      else if (DECL_C_BIT_FIELD (TREE_OPERAND (ref, 1)))
	{
	  /* Clear the ordinary bit.  If this object was a class
	     rvalue we want to preserve that information.  */
	  op1_lvalue_kind &= ~clk_ordinary;
	  /* The lvalue is for a bitfield.  */
	  op1_lvalue_kind |= clk_bitfield;
	}
      else if (DECL_PACKED (TREE_OPERAND (ref, 1)))
	op1_lvalue_kind |= clk_packed;

      return op1_lvalue_kind;

    case STRING_CST:
      return clk_ordinary;

    case VAR_DECL:
      /* A constant member initialized within its class body (see
	 DECL_IN_AGGR_P) is not an lvalue.  */
      if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
	  && DECL_LANG_SPECIFIC (ref)
	  && DECL_IN_AGGR_P (ref))
	return clk_none;
      /* Fall through.  */
    case INDIRECT_REF:
    case ARRAY_REF:
    case PARM_DECL:
    case RESULT_DECL:
      if (TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
	return clk_ordinary;
      break;

      /* A currently unresolved scope ref.  */
    case SCOPE_REF:
      abort ();
    case MAX_EXPR:
    case MIN_EXPR:
      /* An lvalue only if both operands are; combined below.  */
      op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
				    treat_class_rvalues_as_lvalues);
      op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
				    treat_class_rvalues_as_lvalues);
      break;

    case COND_EXPR:
      /* An lvalue only if both arms are; combined below.  */
      op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
				    treat_class_rvalues_as_lvalues);
      op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 2),
				    treat_class_rvalues_as_lvalues);
      break;

    case MODIFY_EXPR:
      return clk_ordinary;

    case COMPOUND_EXPR:
      /* Only the second operand determines the result.  */
      return lvalue_p_1 (TREE_OPERAND (ref, 1),
			 treat_class_rvalues_as_lvalues);

    case TARGET_EXPR:
      return treat_class_rvalues_as_lvalues ? clk_class : clk_none;

    case CALL_EXPR:
    case VA_ARG_EXPR:
      /* Any class-valued call would be wrapped in a TARGET_EXPR.  */
      return clk_none;

    case FUNCTION_DECL:
      /* All functions (except non-static-member functions) are
	 lvalues.  */
      return (DECL_NONSTATIC_MEMBER_FUNCTION_P (ref)
	      ? clk_none : clk_ordinary);

    case NON_DEPENDENT_EXPR:
      /* We must consider NON_DEPENDENT_EXPRs to be lvalues so that
	 things like "&E" where "E" is an expression with a
	 non-dependent type work. It is safe to be lenient because an
	 error will be issued when the template is instantiated if "E"
	 is not an lvalue.  */
      return clk_ordinary;

    default:
      break;
    }

  /* If one operand is not an lvalue at all, then this expression is
     not an lvalue.  */
  if (!op1_lvalue_kind || !op2_lvalue_kind)
    return clk_none;

  /* Otherwise, it's an lvalue, and it has all the odd properties
     contributed by either operand.  */
  op1_lvalue_kind = op1_lvalue_kind | op2_lvalue_kind;
  /* It's not an ordinary lvalue if it involves either a bit-field or
     a class rvalue.  */
  if ((op1_lvalue_kind & ~clk_ordinary) != clk_none)
    op1_lvalue_kind &= ~clk_ordinary;
  return op1_lvalue_kind;
}
| 196 | |
| 197 | /* Returns the kind of lvalue that REF is, in the sense of |
| 198 | [basic.lval]. This function should really be named lvalue_p; it |
| 199 | computes the C++ definition of lvalue. */ |
| 200 | |
| 201 | cp_lvalue_kind |
| 202 | real_lvalue_p (tree ref) |
| 203 | { |
| 204 | return lvalue_p_1 (ref, |
| 205 | /*treat_class_rvalues_as_lvalues=*/0); |
| 206 | } |
| 207 | |
| 208 | /* This differs from real_lvalue_p in that class rvalues are |
| 209 | considered lvalues. */ |
| 210 | |
| 211 | int |
| 212 | lvalue_p (tree ref) |
| 213 | { |
| 214 | return |
| 215 | (lvalue_p_1 (ref, /*class rvalue ok*/ 1) != clk_none); |
| 216 | } |
| 217 | |
| 218 | /* Return nonzero if REF is an lvalue valid for this language; |
| 219 | otherwise, print an error message and return zero. */ |
| 220 | |
| 221 | int |
| 222 | lvalue_or_else (tree ref, const char* string) |
| 223 | { |
| 224 | if (!lvalue_p (ref)) |
| 225 | { |
| 226 | error ("non-lvalue in %s", string); |
| 227 | return 0; |
| 228 | } |
| 229 | return 1; |
| 230 | } |
| 231 | |
| 232 | /* Build a TARGET_EXPR, initializing the DECL with the VALUE. */ |
| 233 | |
| 234 | static tree |
| 235 | build_target_expr (tree decl, tree value) |
| 236 | { |
| 237 | tree t; |
| 238 | |
| 239 | t = build (TARGET_EXPR, TREE_TYPE (decl), decl, value, |
| 240 | cxx_maybe_build_cleanup (decl), NULL_TREE); |
| 241 | /* We always set TREE_SIDE_EFFECTS so that expand_expr does not |
| 242 | ignore the TARGET_EXPR. If there really turn out to be no |
| 243 | side-effects, then the optimizer should be able to get rid of |
| 244 | whatever code is generated anyhow. */ |
| 245 | TREE_SIDE_EFFECTS (t) = 1; |
| 246 | |
| 247 | return t; |
| 248 | } |
| 249 | |
| 250 | /* Return an undeclared local temporary of type TYPE for use in building a |
| 251 | TARGET_EXPR. */ |
| 252 | |
| 253 | static tree |
| 254 | build_local_temp (tree type) |
| 255 | { |
| 256 | tree slot = build_decl (VAR_DECL, NULL_TREE, type); |
| 257 | DECL_ARTIFICIAL (slot) = 1; |
| 258 | DECL_CONTEXT (slot) = current_function_decl; |
| 259 | layout_decl (slot, 0); |
| 260 | return slot; |
| 261 | } |
| 262 | |
| 263 | /* INIT is a CALL_EXPR which needs info about its target. |
| 264 | TYPE is the type that this initialization should appear to have. |
| 265 | |
| 266 | Build an encapsulation of the initialization to perform |
| 267 | and return it so that it can be processed by language-independent |
| 268 | and language-specific expression expanders. */ |
| 269 | |
tree
build_cplus_new (tree type, tree init)
{
  tree fn;
  tree slot;
  tree rval;
  int is_ctor;

  /* Make sure that we're not trying to create an instance of an
     abstract class.  */
  abstract_virtuals_error (NULL_TREE, type);

  /* Only calls need the TARGET_EXPR/AGGR_INIT_EXPR treatment; anything
     else is just converted to TYPE.  */
  if (TREE_CODE (init) != CALL_EXPR && TREE_CODE (init) != AGGR_INIT_EXPR)
    return convert (type, init);

  /* Is the callee a constructor?  Operand 0 of a CALL_EXPR is the
     function being called, usually an ADDR_EXPR of a FUNCTION_DECL.  */
  fn = TREE_OPERAND (init, 0);
  is_ctor = (TREE_CODE (fn) == ADDR_EXPR
	     && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	     && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0)));

  /* The temporary that will hold the constructed object.  */
  slot = build_local_temp (type);

  /* We split the CALL_EXPR into its function and its arguments here.
     Then, in expand_expr, we put them back together.  The reason for
     this is that this expression might be a default argument
     expression.  In that case, we need a new temporary every time the
     expression is used.  That's what break_out_target_exprs does; it
     replaces every AGGR_INIT_EXPR with a copy that uses a fresh
     temporary slot.  Then, expand_expr builds up a call-expression
     using the new slot.  */

  /* If we don't need to use a constructor to create an object of this
     type, don't mess with AGGR_INIT_EXPR.  */
  if (is_ctor || TREE_ADDRESSABLE (type))
    {
      /* AGGR_INIT_EXPR holds the function, the argument list, and the
	 slot to construct into.  */
      rval = build (AGGR_INIT_EXPR, type, fn, TREE_OPERAND (init, 1), slot);
      TREE_SIDE_EFFECTS (rval) = 1;
      AGGR_INIT_VIA_CTOR_P (rval) = is_ctor;
    }
  else
    rval = init;

  /* Wrap the initializer in a TARGET_EXPR targeting SLOT.  */
  rval = build_target_expr (slot, rval);

  return rval;
}
| 316 | |
| 317 | /* Build a TARGET_EXPR using INIT to initialize a new temporary of the |
| 318 | indicated TYPE. */ |
| 319 | |
| 320 | tree |
| 321 | build_target_expr_with_type (tree init, tree type) |
| 322 | { |
| 323 | tree slot; |
| 324 | |
| 325 | if (TREE_CODE (init) == TARGET_EXPR) |
| 326 | return init; |
| 327 | else if (CLASS_TYPE_P (type) && !TYPE_HAS_TRIVIAL_INIT_REF (type) |
| 328 | && TREE_CODE (init) != COND_EXPR |
| 329 | && TREE_CODE (init) != CONSTRUCTOR |
| 330 | && TREE_CODE (init) != VA_ARG_EXPR) |
| 331 | /* We need to build up a copy constructor call. COND_EXPR is a special |
| 332 | case because we already have copies on the arms and we don't want |
| 333 | another one here. A CONSTRUCTOR is aggregate initialization, which |
| 334 | is handled separately. A VA_ARG_EXPR is magic creation of an |
| 335 | aggregate; there's no additional work to be done. */ |
| 336 | return force_rvalue (init); |
| 337 | |
| 338 | slot = build_local_temp (type); |
| 339 | return build_target_expr (slot, init); |
| 340 | } |
| 341 | |
| 342 | /* Like the above function, but without the checking. This function should |
| 343 | only be used by code which is deliberately trying to subvert the type |
| 344 | system, such as call_builtin_trap. */ |
| 345 | |
| 346 | tree |
| 347 | force_target_expr (tree type, tree init) |
| 348 | { |
| 349 | tree slot = build_local_temp (type); |
| 350 | return build_target_expr (slot, init); |
| 351 | } |
| 352 | |
| 353 | /* Like build_target_expr_with_type, but use the type of INIT. */ |
| 354 | |
| 355 | tree |
| 356 | get_target_expr (tree init) |
| 357 | { |
| 358 | return build_target_expr_with_type (init, TREE_TYPE (init)); |
| 359 | } |
| 360 | |
| 361 | \f |
| 362 | static tree |
| 363 | build_cplus_array_type_1 (tree elt_type, tree index_type) |
| 364 | { |
| 365 | tree t; |
| 366 | |
| 367 | if (elt_type == error_mark_node || index_type == error_mark_node) |
| 368 | return error_mark_node; |
| 369 | |
| 370 | if (dependent_type_p (elt_type) |
| 371 | || (index_type |
| 372 | && value_dependent_expression_p (TYPE_MAX_VALUE (index_type)))) |
| 373 | { |
| 374 | t = make_node (ARRAY_TYPE); |
| 375 | TREE_TYPE (t) = elt_type; |
| 376 | TYPE_DOMAIN (t) = index_type; |
| 377 | } |
| 378 | else |
| 379 | t = build_array_type (elt_type, index_type); |
| 380 | |
| 381 | /* Push these needs up so that initialization takes place |
| 382 | more easily. */ |
| 383 | TYPE_NEEDS_CONSTRUCTING (t) |
| 384 | = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type)); |
| 385 | TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t) |
| 386 | = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type)); |
| 387 | return t; |
| 388 | } |
| 389 | |
| 390 | tree |
| 391 | build_cplus_array_type (tree elt_type, tree index_type) |
| 392 | { |
| 393 | tree t; |
| 394 | int type_quals = cp_type_quals (elt_type); |
| 395 | |
| 396 | if (type_quals != TYPE_UNQUALIFIED) |
| 397 | elt_type = cp_build_qualified_type (elt_type, TYPE_UNQUALIFIED); |
| 398 | |
| 399 | t = build_cplus_array_type_1 (elt_type, index_type); |
| 400 | |
| 401 | if (type_quals != TYPE_UNQUALIFIED) |
| 402 | t = cp_build_qualified_type (t, type_quals); |
| 403 | |
| 404 | return t; |
| 405 | } |
| 406 | \f |
| 407 | /* Make a variant of TYPE, qualified with the TYPE_QUALS. Handles |
| 408 | arrays correctly. In particular, if TYPE is an array of T's, and |
| 409 | TYPE_QUALS is non-empty, returns an array of qualified T's. |
| 410 | |
FLAGS determines how to deal with ill-formed qualifications. If
| 412 | tf_ignore_bad_quals is set, then bad qualifications are dropped |
| 413 | (this is permitted if TYPE was introduced via a typedef or template |
| 414 | type parameter). If bad qualifications are dropped and tf_warning |
| 415 | is set, then a warning is issued for non-const qualifications. If |
| 416 | tf_ignore_bad_quals is not set and tf_error is not set, we |
| 417 | return error_mark_node. Otherwise, we issue an error, and ignore |
| 418 | the qualifications. |
| 419 | |
| 420 | Qualification of a reference type is valid when the reference came |
| 421 | via a typedef or template type argument. [dcl.ref] No such |
| 422 | dispensation is provided for qualifying a function type. [dcl.fct] |
| 423 | DR 295 queries this and the proposed resolution brings it into line |
| 424 | with qualifying a reference. We implement the DR. We also behave |
| 425 | in a similar manner for restricting non-pointer types. */ |
| 426 | |
tree
cp_build_qualified_type_real (tree type,
			      int type_quals,
			      tsubst_flags_t complain)
{
  tree result;
  int bad_quals = TYPE_UNQUALIFIED;
  /* We keep bad function qualifiers separate, so that we can decide
     whether to implement DR 295 or not.  DR 295 breaks existing code,
     unfortunately.  Remove this variable to implement the defect
     report.  */
  int bad_func_quals = TYPE_UNQUALIFIED;

  if (type == error_mark_node)
    return type;

  /* Nothing to do if the qualification is already as requested.  */
  if (type_quals == cp_type_quals (type))
    return type;

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      /* In C++, the qualification really applies to the array element
	 type.  Obtain the appropriately qualified element type.  */
      tree t;
      tree element_type
	= cp_build_qualified_type_real (TREE_TYPE (type),
					type_quals,
					complain);

      if (element_type == error_mark_node)
	return error_mark_node;

      /* See if we already have an identically qualified type.  */
      for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	if (cp_type_quals (t) == type_quals
	    && TYPE_NAME (t) == TYPE_NAME (type)
	    && TYPE_CONTEXT (t) == TYPE_CONTEXT (type))
	  break;

      if (!t)
	{
	  /* Make a new array type, just like the old one, but with the
	     appropriately qualified element type.  */
	  t = build_type_copy (type);
	  TREE_TYPE (t) = element_type;
	}

      /* Even if we already had this variant, we update
	 TYPE_NEEDS_CONSTRUCTING and TYPE_HAS_NONTRIVIAL_DESTRUCTOR in case
	 they changed since the variant was originally created.

	 This seems hokey; if there is some way to use a previous
	 variant *without* coming through here,
	 TYPE_NEEDS_CONSTRUCTING will never be updated.  */
      TYPE_NEEDS_CONSTRUCTING (t)
	= TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (element_type));
      TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t)
	= TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TYPE_MAIN_VARIANT (element_type));
      return t;
    }
  else if (TYPE_PTRMEMFUNC_P (type))
    {
      /* For a pointer-to-member type, we can't just return a
	 cv-qualified version of the RECORD_TYPE.  If we do, we
	 haven't changed the field that contains the actual pointer to
	 a method, and so TYPE_PTRMEMFUNC_FN_TYPE will be wrong.  */
      tree t;

      t = TYPE_PTRMEMFUNC_FN_TYPE (type);
      t = cp_build_qualified_type_real (t, type_quals, complain);
      return build_ptrmemfunc_type (t);
    }

  /* A reference, function or method type shall not be cv qualified.
     [dcl.ref], [dcl.fct]  */
  if (type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE)
      && (TREE_CODE (type) == REFERENCE_TYPE
	  || TREE_CODE (type) == FUNCTION_TYPE
	  || TREE_CODE (type) == METHOD_TYPE))
    {
      bad_quals |= type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
      if (TREE_CODE (type) != REFERENCE_TYPE)
	bad_func_quals |= type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
      type_quals &= ~(TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
    }

  /* A restrict-qualified type must be a pointer (or reference)
     to object or incomplete type.  */
  if ((type_quals & TYPE_QUAL_RESTRICT)
      && TREE_CODE (type) != TEMPLATE_TYPE_PARM
      && TREE_CODE (type) != TYPENAME_TYPE
      && !POINTER_TYPE_P (type))
    {
      bad_quals |= TYPE_QUAL_RESTRICT;
      type_quals &= ~TYPE_QUAL_RESTRICT;
    }

  if (bad_quals == TYPE_UNQUALIFIED)
    /*OK*/;
  else if (!(complain & (tf_error | tf_ignore_bad_quals)))
    /* Caller wants neither a diagnostic nor silent dropping.  */
    return error_mark_node;
  else if (bad_func_quals && !(complain & tf_error))
    return error_mark_node;
  else
    {
      if (complain & tf_ignore_bad_quals)
	/* We're not going to warn about constifying things that can't
	   be constified.  */
	bad_quals &= ~TYPE_QUAL_CONST;
      bad_quals |= bad_func_quals;
      if (bad_quals)
	{
	  tree bad_type = build_qualified_type (ptr_type_node, bad_quals);

	  /* Diagnose the qualifiers we are dropping.  */
	  if (!(complain & tf_ignore_bad_quals)
	      || bad_func_quals)
	    error ("`%V' qualifiers cannot be applied to `%T'",
		   bad_type, type);
	}
    }

  /* Retrieve (or create) the appropriately qualified variant.  */
  result = build_qualified_type (type, type_quals);

  /* If this was a pointer-to-method type, and we just made a copy,
     then we need to unshare the record that holds the cached
     pointer-to-member-function type, because these will be distinct
     between the unqualified and qualified types.  */
  if (result != type
      && TREE_CODE (type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE)
    TYPE_LANG_SPECIFIC (result) = NULL;

  return result;
}
| 562 | |
| 563 | /* Returns the canonical version of TYPE. In other words, if TYPE is |
| 564 | a typedef, returns the underlying type. The cv-qualification of |
| 565 | the type returned matches the type input; they will always be |
| 566 | compatible types. */ |
| 567 | |
| 568 | tree |
| 569 | canonical_type_variant (tree t) |
| 570 | { |
| 571 | return cp_build_qualified_type (TYPE_MAIN_VARIANT (t), cp_type_quals (t)); |
| 572 | } |
| 573 | \f |
| 574 | /* Makes new binfos for the indirect bases under BINFO. T is the most |
| 575 | derived TYPE. PREV is the previous binfo, whose TREE_CHAIN we make |
| 576 | point to this binfo. We return the last BINFO created. |
| 577 | |
| 578 | The CLASSTYPE_VBASECLASSES list of T is constructed in reverse |
| 579 | order (pre-order, depth-first, right-to-left). You must nreverse it. |
| 580 | |
| 581 | The BINFO_INHERITANCE of a virtual base class points to the binfo |
of the most derived type.
| 583 | |
| 584 | The binfo's TREE_CHAIN is set to inheritance graph order, but bases |
| 585 | for non-class types are not included (i.e. those which are |
| 586 | dependent bases in non-instantiated templates). */ |
| 587 | |
tree
copy_base_binfos (tree binfo, tree t, tree prev)
{
  tree binfos = BINFO_BASETYPES (binfo);
  int n, ix;

  /* Link BINFO onto the inheritance-graph-order chain after PREV.  */
  if (prev)
    TREE_CHAIN (prev) = binfo;
  prev = binfo;

  if (binfos == NULL_TREE)
    return prev;

  n = TREE_VEC_LENGTH (binfos);

  /* Now copy the structure beneath BINFO.  */
  for (ix = 0; ix != n; ix++)
    {
      tree base_binfo = TREE_VEC_ELT (binfos, ix);
      tree new_binfo = NULL_TREE;

      if (!CLASS_TYPE_P (BINFO_TYPE (base_binfo)))
	{
	  /* A non-class (dependent) base; per the assertion this can
	     only occur directly below the most derived type's binfo.
	     Reuse BASE_BINFO itself rather than copying.  */
	  my_friendly_assert (binfo == TYPE_BINFO (t), 20030204);

	  new_binfo = base_binfo;
	  TREE_CHAIN (prev) = new_binfo;
	  prev = new_binfo;
	  BINFO_INHERITANCE_CHAIN (new_binfo) = binfo;
	  BINFO_DEPENDENT_BASE_P (new_binfo) = 1;
	}
      else if (TREE_VIA_VIRTUAL (base_binfo))
	{
	  /* A virtual base we may have copied already; look it up in
	     T's virtual base list so it is shared.  */
	  new_binfo = purpose_member (BINFO_TYPE (base_binfo),
				      CLASSTYPE_VBASECLASSES (t));
	  if (new_binfo)
	    new_binfo = TREE_VALUE (new_binfo);
	}

      if (!new_binfo)
	{
	  /* First (or only) time we meet this base: copy it and
	     recurse over its own bases.  */
	  new_binfo = make_binfo (BINFO_OFFSET (base_binfo),
				  base_binfo, NULL_TREE,
				  BINFO_VIRTUALS (base_binfo));
	  prev = copy_base_binfos (new_binfo, t, prev);
	  if (TREE_VIA_VIRTUAL (base_binfo))
	    {
	      /* Record the new virtual base; the list is built in
		 reverse order, as documented above.  */
	      CLASSTYPE_VBASECLASSES (t)
		= tree_cons (BINFO_TYPE (new_binfo), new_binfo,
			     CLASSTYPE_VBASECLASSES (t));
	      TREE_VIA_VIRTUAL (new_binfo) = 1;
	      BINFO_INHERITANCE_CHAIN (new_binfo) = TYPE_BINFO (t);
	    }
	  else
	    BINFO_INHERITANCE_CHAIN (new_binfo) = binfo;
	}
      TREE_VEC_ELT (binfos, ix) = new_binfo;
    }

  return prev;
}
| 649 | |
| 650 | \f |
| 651 | /* Hashing of lists so that we don't make duplicates. |
| 652 | The entry point is `list_hash_canon'. */ |
| 653 | |
| 654 | /* Now here is the hash table. When recording a list, it is added |
| 655 | to the slot whose index is the hash code mod the table size. |
| 656 | Note that the hash table is used for several kinds of lists. |
| 657 | While all these live in the same table, they are completely independent, |
| 658 | and the hash code is computed differently for each of these. */ |
| 659 | |
| 660 | static GTY ((param_is (union tree_node))) htab_t list_hash_table; |
| 661 | |
/* Describes a TREE_LIST node we might want to create, without actually
   allocating it: the would-be TREE_PURPOSE, TREE_VALUE, and TREE_CHAIN.
   Used as the comparison key when probing LIST_HASH_TABLE.  */
struct list_proxy
{
  tree purpose;	/* Would-be TREE_PURPOSE.  */
  tree value;	/* Would-be TREE_VALUE.  */
  tree chain;	/* Would-be TREE_CHAIN.  */
};
| 668 | |
| 669 | /* Compare ENTRY (an entry in the hash table) with DATA (a list_proxy |
| 670 | for a node we are thinking about adding). */ |
| 671 | |
| 672 | static int |
| 673 | list_hash_eq (const void* entry, const void* data) |
| 674 | { |
| 675 | tree t = (tree) entry; |
| 676 | struct list_proxy *proxy = (struct list_proxy *) data; |
| 677 | |
| 678 | return (TREE_VALUE (t) == proxy->value |
| 679 | && TREE_PURPOSE (t) == proxy->purpose |
| 680 | && TREE_CHAIN (t) == proxy->chain); |
| 681 | } |
| 682 | |
| 683 | /* Compute a hash code for a list (chain of TREE_LIST nodes |
| 684 | with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the |
| 685 | TREE_COMMON slots), by adding the hash codes of the individual entries. */ |
| 686 | |
| 687 | static hashval_t |
| 688 | list_hash_pieces (tree purpose, tree value, tree chain) |
| 689 | { |
| 690 | hashval_t hashcode = 0; |
| 691 | |
| 692 | if (chain) |
| 693 | hashcode += TYPE_HASH (chain); |
| 694 | |
| 695 | if (value) |
| 696 | hashcode += TYPE_HASH (value); |
| 697 | else |
| 698 | hashcode += 1007; |
| 699 | if (purpose) |
| 700 | hashcode += TYPE_HASH (purpose); |
| 701 | else |
| 702 | hashcode += 1009; |
| 703 | return hashcode; |
| 704 | } |
| 705 | |
| 706 | /* Hash an already existing TREE_LIST. */ |
| 707 | |
| 708 | static hashval_t |
| 709 | list_hash (const void* p) |
| 710 | { |
| 711 | tree t = (tree) p; |
| 712 | return list_hash_pieces (TREE_PURPOSE (t), |
| 713 | TREE_VALUE (t), |
| 714 | TREE_CHAIN (t)); |
| 715 | } |
| 716 | |
| 717 | /* Given list components PURPOSE, VALUE, AND CHAIN, return the canonical |
| 718 | object for an identical list if one already exists. Otherwise, build a |
| 719 | new one, and record it as the canonical object. */ |
| 720 | |
| 721 | tree |
| 722 | hash_tree_cons (tree purpose, tree value, tree chain) |
| 723 | { |
| 724 | int hashcode = 0; |
| 725 | void **slot; |
| 726 | struct list_proxy proxy; |
| 727 | |
| 728 | /* Hash the list node. */ |
| 729 | hashcode = list_hash_pieces (purpose, value, chain); |
| 730 | /* Create a proxy for the TREE_LIST we would like to create. We |
| 731 | don't actually create it so as to avoid creating garbage. */ |
| 732 | proxy.purpose = purpose; |
| 733 | proxy.value = value; |
| 734 | proxy.chain = chain; |
| 735 | /* See if it is already in the table. */ |
| 736 | slot = htab_find_slot_with_hash (list_hash_table, &proxy, hashcode, |
| 737 | INSERT); |
| 738 | /* If not, create a new node. */ |
| 739 | if (!*slot) |
| 740 | *slot = tree_cons (purpose, value, chain); |
| 741 | return *slot; |
| 742 | } |
| 743 | |
| 744 | /* Constructor for hashed lists. */ |
| 745 | |
| 746 | tree |
| 747 | hash_tree_chain (tree value, tree chain) |
| 748 | { |
| 749 | return hash_tree_cons (NULL_TREE, value, chain); |
| 750 | } |
| 751 | |
| 752 | /* Similar, but used for concatenating two lists. */ |
| 753 | |
| 754 | tree |
| 755 | hash_chainon (tree list1, tree list2) |
| 756 | { |
| 757 | if (list2 == 0) |
| 758 | return list1; |
| 759 | if (list1 == 0) |
| 760 | return list2; |
| 761 | if (TREE_CHAIN (list1) == NULL_TREE) |
| 762 | return hash_tree_chain (TREE_VALUE (list1), list2); |
| 763 | return hash_tree_chain (TREE_VALUE (list1), |
| 764 | hash_chainon (TREE_CHAIN (list1), list2)); |
| 765 | } |
| 766 | \f |
| 767 | /* Build an association between TYPE and some parameters: |
| 768 | |
| 769 | OFFSET is the offset added to `this' to convert it to a pointer |
| 770 | of type `TYPE *' |
| 771 | |
| 772 | BINFO is the base binfo to use, if we are deriving from one. This |
| 773 | is necessary, as we want specialized parent binfos from base |
| 774 | classes, so that the VTABLE_NAMEs of bases are for the most derived |
| 775 | type, instead of the simple type. |
| 776 | |
| 777 | VTABLE is the virtual function table with which to initialize |
| 778 | sub-objects of type TYPE. |
| 779 | |
| 780 | VIRTUALS are the virtual functions sitting in VTABLE. */ |
| 781 | |
tree
make_binfo (tree offset, tree binfo, tree vtable, tree virtuals)
{
  tree new_binfo = make_tree_vec (BINFO_LANG_ELTS);
  tree type;

  if (TREE_CODE (binfo) == TREE_VEC)
    {
      /* BINFO is an existing binfo; derive TYPE and the dependent-base
	 flag from it.  */
      type = BINFO_TYPE (binfo);
      BINFO_DEPENDENT_BASE_P (new_binfo) = BINFO_DEPENDENT_BASE_P (binfo);
    }
  else
    {
      /* BINFO is actually a type node, not a binfo; there is no base
	 binfo to copy from.  */
      type = binfo;
      binfo = NULL_TREE;
      BINFO_DEPENDENT_BASE_P (new_binfo) = 1;
    }

  TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
  BINFO_OFFSET (new_binfo) = offset;
  BINFO_VTABLE (new_binfo) = vtable;
  BINFO_VIRTUALS (new_binfo) = virtuals;

  /* Share or copy the base info from BINFO when it has any.  */
  if (binfo && !BINFO_DEPENDENT_BASE_P (binfo)
      && BINFO_BASETYPES (binfo) != NULL_TREE)
    {
      BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
      /* We do not need to copy the accesses, as they are read only.  */
      BINFO_BASEACCESSES (new_binfo) = BINFO_BASEACCESSES (binfo);
    }
  return new_binfo;
}
| 814 | |
/* Debugging aid: dump a description of the binfo ELEM to stderr --
   its type name and offset, its vtable decl (if one has been built),
   and each virtual function together with the running index N versus
   the function's recorded DECL_VINDEX.  */
void
debug_binfo (tree elem)
{
  HOST_WIDE_INT n;
  tree virtuals;

  fprintf (stderr, "type \"%s\", offset = " HOST_WIDE_INT_PRINT_DEC
	   "\nvtable type:\n",
	   TYPE_NAME_STRING (BINFO_TYPE (elem)),
	   TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
  debug_tree (BINFO_TYPE (elem));
  if (BINFO_VTABLE (elem))
    fprintf (stderr, "vtable decl \"%s\"\n",
	     IDENTIFIER_POINTER (DECL_NAME (get_vtbl_decl_for_binfo (elem))));
  else
    fprintf (stderr, "no vtable decl yet\n");
  fprintf (stderr, "virtuals:\n");
  virtuals = BINFO_VIRTUALS (elem);
  n = 0;

  while (virtuals)
    {
      tree fndecl = TREE_VALUE (virtuals);
      /* Print "<mangled name> [position =? recorded vindex]".  */
      fprintf (stderr, "%s [%ld =? %ld]\n",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
	       (long) n, (long) TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
      ++n;
      virtuals = TREE_CHAIN (virtuals);
    }
}
| 845 | |
| 846 | int |
| 847 | count_functions (tree t) |
| 848 | { |
| 849 | int i; |
| 850 | if (TREE_CODE (t) == FUNCTION_DECL) |
| 851 | return 1; |
| 852 | else if (TREE_CODE (t) == OVERLOAD) |
| 853 | { |
| 854 | for (i = 0; t; t = OVL_CHAIN (t)) |
| 855 | i++; |
| 856 | return i; |
| 857 | } |
| 858 | |
| 859 | abort (); |
| 860 | return 0; |
| 861 | } |
| 862 | |
| 863 | int |
| 864 | is_overloaded_fn (tree x) |
| 865 | { |
| 866 | /* A baselink is also considered an overloaded function. */ |
| 867 | if (TREE_CODE (x) == OFFSET_REF) |
| 868 | x = TREE_OPERAND (x, 1); |
| 869 | if (BASELINK_P (x)) |
| 870 | x = BASELINK_FUNCTIONS (x); |
| 871 | return (TREE_CODE (x) == FUNCTION_DECL |
| 872 | || TREE_CODE (x) == TEMPLATE_ID_EXPR |
| 873 | || DECL_FUNCTION_TEMPLATE_P (x) |
| 874 | || TREE_CODE (x) == OVERLOAD); |
| 875 | } |
| 876 | |
| 877 | int |
| 878 | really_overloaded_fn (tree x) |
| 879 | { |
| 880 | /* A baselink is also considered an overloaded function. */ |
| 881 | if (TREE_CODE (x) == OFFSET_REF) |
| 882 | x = TREE_OPERAND (x, 1); |
| 883 | if (BASELINK_P (x)) |
| 884 | x = BASELINK_FUNCTIONS (x); |
| 885 | |
| 886 | return ((TREE_CODE (x) == OVERLOAD && OVL_CHAIN (x)) |
| 887 | || DECL_FUNCTION_TEMPLATE_P (OVL_CURRENT (x)) |
| 888 | || TREE_CODE (x) == TEMPLATE_ID_EXPR); |
| 889 | } |
| 890 | |
| 891 | tree |
| 892 | get_first_fn (tree from) |
| 893 | { |
| 894 | my_friendly_assert (is_overloaded_fn (from), 9); |
| 895 | /* A baselink is also considered an overloaded function. */ |
| 896 | if (BASELINK_P (from)) |
| 897 | from = BASELINK_FUNCTIONS (from); |
| 898 | return OVL_CURRENT (from); |
| 899 | } |
| 900 | |
| 901 | /* Returns nonzero if T is a ->* or .* expression that refers to a |
| 902 | member function. */ |
| 903 | |
| 904 | int |
| 905 | bound_pmf_p (tree t) |
| 906 | { |
| 907 | return (TREE_CODE (t) == OFFSET_REF |
| 908 | && TYPE_PTRMEMFUNC_P (TREE_TYPE (TREE_OPERAND (t, 1)))); |
| 909 | } |
| 910 | |
| 911 | /* Return a new OVL node, concatenating it with the old one. */ |
| 912 | |
| 913 | tree |
| 914 | ovl_cons (tree decl, tree chain) |
| 915 | { |
| 916 | tree result = make_node (OVERLOAD); |
| 917 | TREE_TYPE (result) = unknown_type_node; |
| 918 | OVL_FUNCTION (result) = decl; |
| 919 | TREE_CHAIN (result) = chain; |
| 920 | |
| 921 | return result; |
| 922 | } |
| 923 | |
| 924 | /* Build a new overloaded function. If this is the first one, |
| 925 | just return it; otherwise, ovl_cons the _DECLs */ |
| 926 | |
| 927 | tree |
| 928 | build_overload (tree decl, tree chain) |
| 929 | { |
| 930 | if (! chain && TREE_CODE (decl) != TEMPLATE_DECL) |
| 931 | return decl; |
| 932 | if (chain && TREE_CODE (chain) != OVERLOAD) |
| 933 | chain = ovl_cons (chain, NULL_TREE); |
| 934 | return ovl_cons (decl, chain); |
| 935 | } |
| 936 | |
| 937 | \f |
| 938 | #define PRINT_RING_SIZE 4 |
| 939 | |
/* Return a printable name for DECL with verbosity V (as for
   lang_decl_name).  For function decls carrying language-specific
   data the string is cached in a small ring, so the returned pointer
   remains valid across the next few calls; callers must not free it.  */
const char *
cxx_printable_name (tree decl, int v)
{
  static tree decl_ring[PRINT_RING_SIZE];
  static char *print_ring[PRINT_RING_SIZE];
  static int ring_counter;
  int i;

  /* Only cache functions.  */
  if (v < 2
      || TREE_CODE (decl) != FUNCTION_DECL
      || DECL_LANG_SPECIFIC (decl) == 0)
    return lang_decl_name (decl, v);

  /* See if this print name is lying around.  */
  for (i = 0; i < PRINT_RING_SIZE; i++)
    if (decl_ring[i] == decl)
      /* yes, so return it.  */
      return print_ring[i];

  /* Advance to the next eviction slot, wrapping around.  */
  if (++ring_counter == PRINT_RING_SIZE)
    ring_counter = 0;

  if (current_function_decl != NULL_TREE)
    {
      /* Skip over the slot caching the current function's name
	 (NOTE(review): presumably its string may still be referenced
	 by diagnostics -- confirm).  If every slot would be that one,
	 something is badly wrong.  */
      if (decl_ring[ring_counter] == current_function_decl)
	ring_counter += 1;
      if (ring_counter == PRINT_RING_SIZE)
	ring_counter = 0;
      if (decl_ring[ring_counter] == current_function_decl)
	abort ();
    }

  /* Evict the previous occupant of the slot.  */
  if (print_ring[ring_counter])
    free (print_ring[ring_counter]);

  print_ring[ring_counter] = xstrdup (lang_decl_name (decl, v));
  decl_ring[ring_counter] = decl;
  return print_ring[ring_counter];
}
| 980 | \f |
| 981 | /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions |
| 982 | listed in RAISES. */ |
| 983 | |
| 984 | tree |
| 985 | build_exception_variant (tree type, tree raises) |
| 986 | { |
| 987 | tree v = TYPE_MAIN_VARIANT (type); |
| 988 | int type_quals = TYPE_QUALS (type); |
| 989 | |
| 990 | for (; v; v = TYPE_NEXT_VARIANT (v)) |
| 991 | if (TYPE_QUALS (v) == type_quals |
| 992 | && comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (v), 1) |
| 993 | && (*targetm.comp_type_attributes) (type, v)) |
| 994 | return v; |
| 995 | |
| 996 | /* Need to build a new variant. */ |
| 997 | v = build_type_copy (type); |
| 998 | TYPE_RAISES_EXCEPTIONS (v) = raises; |
| 999 | return v; |
| 1000 | } |
| 1001 | |
| 1002 | /* Given a TEMPLATE_TEMPLATE_PARM node T, create a new |
| 1003 | BOUND_TEMPLATE_TEMPLATE_PARM bound with NEWARGS as its template |
| 1004 | arguments. */ |
| 1005 | |
tree
bind_template_template_parm (tree t, tree newargs)
{
  tree decl = TYPE_NAME (t);
  tree t2;

  t2 = make_aggr_type (BOUND_TEMPLATE_TEMPLATE_PARM);
  /* Build a fresh TYPE_DECL with the same name as T's.  */
  decl = build_decl (TYPE_DECL, DECL_NAME (decl), NULL_TREE);

  /* These nodes have to be created to reflect new TYPE_DECL and template
     arguments.  */
  TEMPLATE_TYPE_PARM_INDEX (t2) = copy_node (TEMPLATE_TYPE_PARM_INDEX (t));
  TEMPLATE_PARM_DECL (TEMPLATE_TYPE_PARM_INDEX (t2)) = decl;
  /* Record the template and its bound arguments NEWARGS.  */
  TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t2)
    = tree_cons (TEMPLATE_TEMPLATE_PARM_TEMPLATE_DECL (t),
		 newargs, NULL_TREE);

  /* Cross-link the new type and its decl.  */
  TREE_TYPE (decl) = t2;
  TYPE_NAME (t2) = decl;
  TYPE_STUB_DECL (t2) = decl;
  /* The bound parameter has no layout.  */
  TYPE_SIZE (t2) = 0;

  return t2;
}
| 1030 | |
| 1031 | /* Called from count_trees via walk_tree. */ |
| 1032 | |
| 1033 | static tree |
| 1034 | count_trees_r (tree* tp ATTRIBUTE_UNUSED , |
| 1035 | int* walk_subtrees ATTRIBUTE_UNUSED , |
| 1036 | void* data) |
| 1037 | { |
| 1038 | ++ *((int*) data); |
| 1039 | return NULL_TREE; |
| 1040 | } |
| 1041 | |
| 1042 | /* Debugging function for measuring the rough complexity of a tree |
| 1043 | representation. */ |
| 1044 | |
| 1045 | int |
| 1046 | count_trees (tree t) |
| 1047 | { |
| 1048 | int n_trees = 0; |
| 1049 | walk_tree_without_duplicates (&t, count_trees_r, &n_trees); |
| 1050 | return n_trees; |
| 1051 | } |
| 1052 | |
| 1053 | /* Called from verify_stmt_tree via walk_tree. */ |
| 1054 | |
| 1055 | static tree |
| 1056 | verify_stmt_tree_r (tree* tp, |
| 1057 | int* walk_subtrees ATTRIBUTE_UNUSED , |
| 1058 | void* data) |
| 1059 | { |
| 1060 | tree t = *tp; |
| 1061 | htab_t *statements = (htab_t *) data; |
| 1062 | void **slot; |
| 1063 | |
| 1064 | if (!STATEMENT_CODE_P (TREE_CODE (t))) |
| 1065 | return NULL_TREE; |
| 1066 | |
| 1067 | /* If this statement is already present in the hash table, then |
| 1068 | there is a circularity in the statement tree. */ |
| 1069 | if (htab_find (*statements, t)) |
| 1070 | abort (); |
| 1071 | |
| 1072 | slot = htab_find_slot (*statements, t, INSERT); |
| 1073 | *slot = t; |
| 1074 | |
| 1075 | return NULL_TREE; |
| 1076 | } |
| 1077 | |
| 1078 | /* Debugging function to check that the statement T has not been |
| 1079 | corrupted. For now, this function simply checks that T contains no |
| 1080 | circularities. */ |
| 1081 | |
| 1082 | void |
| 1083 | verify_stmt_tree (tree t) |
| 1084 | { |
| 1085 | htab_t statements; |
| 1086 | statements = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL); |
| 1087 | walk_tree (&t, verify_stmt_tree_r, &statements, NULL); |
| 1088 | htab_delete (statements); |
| 1089 | } |
| 1090 | |
| 1091 | /* Called from find_tree via walk_tree. */ |
| 1092 | |
| 1093 | static tree |
| 1094 | find_tree_r (tree* tp, |
| 1095 | int* walk_subtrees ATTRIBUTE_UNUSED , |
| 1096 | void* data) |
| 1097 | { |
| 1098 | if (*tp == (tree) data) |
| 1099 | return (tree) data; |
| 1100 | |
| 1101 | return NULL_TREE; |
| 1102 | } |
| 1103 | |
| 1104 | /* Returns X if X appears in the tree structure rooted at T. */ |
| 1105 | |
| 1106 | tree |
| 1107 | find_tree (tree t, tree x) |
| 1108 | { |
| 1109 | return walk_tree_without_duplicates (&t, find_tree_r, x); |
| 1110 | } |
| 1111 | |
| 1112 | /* Passed to walk_tree. Checks for the use of types with no linkage. */ |
| 1113 | |
/* Passed to walk_tree.  Checks for the use of types with no linkage.
   Returns *TP when it is a class or enumeration type that is either
   function-local or anonymous; NULL_TREE otherwise.  */
static tree
no_linkage_helper (tree* tp,
		   int* walk_subtrees ATTRIBUTE_UNUSED ,
		   void* data ATTRIBUTE_UNUSED )
{
  tree t = *tp;

  if (TYPE_P (t)
      && (CLASS_TYPE_P (t) || TREE_CODE (t) == ENUMERAL_TYPE)
      && (decl_function_context (TYPE_MAIN_DECL (t))
	  || TYPE_ANONYMOUS_P (t)))
    return t;
  return NULL_TREE;
}
| 1128 | |
| 1129 | /* Check if the type T depends on a type with no linkage and if so, return |
| 1130 | it. */ |
| 1131 | |
tree
no_linkage_check (tree t)
{
  /* There's no point in checking linkage on template functions; we
     can't know their complete types.  */
  if (processing_template_decl)
    return NULL_TREE;

  /* The walk yields the first no-linkage type found in T, or
     NULL_TREE.  NOTE(review): the error_mark_node filter below only
     fires if T itself contains error_mark_node -- confirm before
     simplifying.  */
  t = walk_tree_without_duplicates (&t, no_linkage_helper, NULL);
  if (t != error_mark_node)
    return t;
  return NULL_TREE;
}
| 1145 | |
| 1146 | #ifdef GATHER_STATISTICS |
| 1147 | extern int depth_reached; |
| 1148 | #endif |
| 1149 | |
/* Print the front end's accumulated search and class statistics to
   stderr; when built with GATHER_STATISTICS, also report the deepest
   template instantiation depth reached.  */
void
cxx_print_statistics (void)
{
  print_search_statistics ();
  print_class_statistics ();
#ifdef GATHER_STATISTICS
  fprintf (stderr, "maximum template instantiation depth reached: %d\n",
	   depth_reached);
#endif
}
| 1160 | |
| 1161 | /* Return, as an INTEGER_CST node, the number of elements for TYPE |
| 1162 | (which is an ARRAY_TYPE). This counts only elements of the top |
| 1163 | array. */ |
| 1164 | |
| 1165 | tree |
| 1166 | array_type_nelts_top (tree type) |
| 1167 | { |
| 1168 | return fold (build (PLUS_EXPR, sizetype, |
| 1169 | array_type_nelts (type), |
| 1170 | integer_one_node)); |
| 1171 | } |
| 1172 | |
| 1173 | /* Return, as an INTEGER_CST node, the number of elements for TYPE |
| 1174 | (which is an ARRAY_TYPE). This one is a recursive count of all |
| 1175 | ARRAY_TYPEs that are clumped together. */ |
| 1176 | |
| 1177 | tree |
| 1178 | array_type_nelts_total (tree type) |
| 1179 | { |
| 1180 | tree sz = array_type_nelts_top (type); |
| 1181 | type = TREE_TYPE (type); |
| 1182 | while (TREE_CODE (type) == ARRAY_TYPE) |
| 1183 | { |
| 1184 | tree n = array_type_nelts_top (type); |
| 1185 | sz = fold (build (MULT_EXPR, sizetype, sz, n)); |
| 1186 | type = TREE_TYPE (type); |
| 1187 | } |
| 1188 | return sz; |
| 1189 | } |
| 1190 | |
| 1191 | /* Called from break_out_target_exprs via mapcar. */ |
| 1192 | |
/* Called from break_out_target_exprs via mapcar.  Copies the tree at
   *TP, rebuilding each TARGET_EXPR with a fresh slot variable and
   recording the old-to-new slot mapping in DATA (a splay tree later
   consumed by bot_replace).  */
static tree
bot_manip (tree* tp, int* walk_subtrees, void* data)
{
  splay_tree target_remap = ((splay_tree) data);
  tree t = *tp;

  if (TREE_CONSTANT (t))
    {
      /* There can't be any TARGET_EXPRs or their slot variables below
         this point.  We used to check !TREE_SIDE_EFFECTS, but then we
         failed to copy an ADDR_EXPR of the slot VAR_DECL.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  if (TREE_CODE (t) == TARGET_EXPR)
    {
      tree u;

      if (TREE_CODE (TREE_OPERAND (t, 1)) == AGGR_INIT_EXPR)
	{
	  /* Mark the constructor named by the AGGR_INIT_EXPR as used
	     before rebuilding the initializer.  */
	  mark_used (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 1), 0), 0));
	  u = build_cplus_new
	    (TREE_TYPE (t), break_out_target_exprs (TREE_OPERAND (t, 1)));
	}
      else
	{
	  u = build_target_expr_with_type
	    (break_out_target_exprs (TREE_OPERAND (t, 1)), TREE_TYPE (t));
	}

      /* Map the old variable to the new one.  */
      splay_tree_insert (target_remap,
			 (splay_tree_key) TREE_OPERAND (t, 0),
			 (splay_tree_value) TREE_OPERAND (u, 0));

      /* Replace the old expression with the new version.  */
      *tp = u;
      /* We don't have to go below this point; the recursive call to
	 break_out_target_exprs will have handled anything below this
	 point.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  else if (TREE_CODE (t) == CALL_EXPR)
    /* Mark the function being called as used.  */
    mark_used (TREE_OPERAND (TREE_OPERAND (t, 0), 0));

  /* Make a copy of this node.  */
  return copy_tree_r (tp, walk_subtrees, NULL);
}
| 1242 | |
| 1243 | /* Replace all remapped VAR_DECLs in T with their new equivalents. |
| 1244 | DATA is really a splay-tree mapping old variables to new |
| 1245 | variables. */ |
| 1246 | |
| 1247 | static tree |
| 1248 | bot_replace (tree* t, |
| 1249 | int* walk_subtrees ATTRIBUTE_UNUSED , |
| 1250 | void* data) |
| 1251 | { |
| 1252 | splay_tree target_remap = ((splay_tree) data); |
| 1253 | |
| 1254 | if (TREE_CODE (*t) == VAR_DECL) |
| 1255 | { |
| 1256 | splay_tree_node n = splay_tree_lookup (target_remap, |
| 1257 | (splay_tree_key) *t); |
| 1258 | if (n) |
| 1259 | *t = (tree) n->value; |
| 1260 | } |
| 1261 | |
| 1262 | return NULL_TREE; |
| 1263 | } |
| 1264 | |
| 1265 | /* When we parse a default argument expression, we may create |
| 1266 | temporary variables via TARGET_EXPRs. When we actually use the |
| 1267 | default-argument expression, we make a copy of the expression, but |
| 1268 | we must replace the temporaries with appropriate local versions. */ |
| 1269 | |
tree
break_out_target_exprs (tree t)
{
  /* Recursion depth: bot_manip re-enters this function, and the remap
     table must be shared across the whole recursion.  Only the
     outermost call creates and destroys it.  */
  static int target_remap_count;
  static splay_tree target_remap;

  if (!target_remap_count++)
    target_remap = splay_tree_new (splay_tree_compare_pointers,
				   /*splay_tree_delete_key_fn=*/NULL,
				   /*splay_tree_delete_value_fn=*/NULL);
  /* Pass 1: copy the tree, giving TARGET_EXPRs fresh slot variables.
     Pass 2: rewrite remaining references to the old slots.  */
  walk_tree (&t, bot_manip, target_remap, NULL);
  walk_tree (&t, bot_replace, target_remap, NULL);

  if (!--target_remap_count)
    {
      splay_tree_delete (target_remap);
      target_remap = NULL;
    }

  return t;
}
| 1291 | |
| 1292 | /* Similar to `build_nt', but for template definitions of dependent |
| 1293 | expressions */ |
| 1294 | |
| 1295 | tree |
| 1296 | build_min_nt (enum tree_code code, ...) |
| 1297 | { |
| 1298 | tree t; |
| 1299 | int length; |
| 1300 | int i; |
| 1301 | va_list p; |
| 1302 | |
| 1303 | va_start (p, code); |
| 1304 | |
| 1305 | t = make_node (code); |
| 1306 | length = TREE_CODE_LENGTH (code); |
| 1307 | TREE_COMPLEXITY (t) = input_line; |
| 1308 | |
| 1309 | for (i = 0; i < length; i++) |
| 1310 | { |
| 1311 | tree x = va_arg (p, tree); |
| 1312 | TREE_OPERAND (t, i) = x; |
| 1313 | } |
| 1314 | |
| 1315 | va_end (p); |
| 1316 | return t; |
| 1317 | } |
| 1318 | |
| 1319 | /* Similar to `build', but for template definitions. */ |
| 1320 | |
| 1321 | tree |
| 1322 | build_min (enum tree_code code, tree tt, ...) |
| 1323 | { |
| 1324 | tree t; |
| 1325 | int length; |
| 1326 | int i; |
| 1327 | va_list p; |
| 1328 | |
| 1329 | va_start (p, tt); |
| 1330 | |
| 1331 | t = make_node (code); |
| 1332 | length = TREE_CODE_LENGTH (code); |
| 1333 | TREE_TYPE (t) = tt; |
| 1334 | TREE_COMPLEXITY (t) = input_line; |
| 1335 | |
| 1336 | for (i = 0; i < length; i++) |
| 1337 | { |
| 1338 | tree x = va_arg (p, tree); |
| 1339 | TREE_OPERAND (t, i) = x; |
| 1340 | if (x && TREE_SIDE_EFFECTS (x)) |
| 1341 | TREE_SIDE_EFFECTS (t) = 1; |
| 1342 | } |
| 1343 | |
| 1344 | va_end (p); |
| 1345 | return t; |
| 1346 | } |
| 1347 | |
| 1348 | /* Similar to `build', but for template definitions of non-dependent |
| 1349 | expressions. NON_DEP is the non-dependent expression that has been |
| 1350 | built. */ |
| 1351 | |
tree
build_min_non_dep (enum tree_code code, tree non_dep, ...)
{
  tree t;
  int length;
  int i;
  va_list p;

  va_start (p, non_dep);

  t = make_node (code);
  length = TREE_CODE_LENGTH (code);
  /* The template form inherits its type and side-effects flag from
     the already-built non-dependent expression.  */
  TREE_TYPE (t) = TREE_TYPE (non_dep);
  TREE_COMPLEXITY (t) = input_line;
  TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (non_dep);

  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = x;
    }

  if (code == COMPOUND_EXPR && TREE_CODE (non_dep) != COMPOUND_EXPR)
    /* This should not be considered a COMPOUND_EXPR, because it
       resolves to an overload.  */
    COMPOUND_EXPR_OVERLOADED (t) = 1;

  va_end (p);
  return t;
}
| 1382 | |
| 1383 | /* Returns an INTEGER_CST (of type `int') corresponding to I. |
| 1384 | Multiple calls with the same value of I may or may not yield the |
| 1385 | same node; therefore, callers should never modify the node |
| 1386 | returned. */ |
| 1387 | |
| 1388 | static GTY(()) tree shared_int_cache[256]; |
| 1389 | |
| 1390 | tree |
| 1391 | build_shared_int_cst (int i) |
| 1392 | { |
| 1393 | if (i >= 256) |
| 1394 | return build_int_2 (i, 0); |
| 1395 | |
| 1396 | if (!shared_int_cache[i]) |
| 1397 | shared_int_cache[i] = build_int_2 (i, 0); |
| 1398 | |
| 1399 | return shared_int_cache[i]; |
| 1400 | } |
| 1401 | |
| 1402 | tree |
| 1403 | get_type_decl (tree t) |
| 1404 | { |
| 1405 | if (TREE_CODE (t) == TYPE_DECL) |
| 1406 | return t; |
| 1407 | if (TYPE_P (t)) |
| 1408 | return TYPE_STUB_DECL (t); |
| 1409 | if (t == error_mark_node) |
| 1410 | return t; |
| 1411 | |
| 1412 | abort (); |
| 1413 | |
| 1414 | /* Stop compiler from complaining control reaches end of non-void function. */ |
| 1415 | return 0; |
| 1416 | } |
| 1417 | |
| 1418 | /* Return first vector element whose BINFO_TYPE is ELEM. |
| 1419 | Return 0 if ELEM is not in VEC. VEC may be NULL_TREE. */ |
| 1420 | |
| 1421 | tree |
| 1422 | vec_binfo_member (tree elem, tree vec) |
| 1423 | { |
| 1424 | int i; |
| 1425 | |
| 1426 | if (vec) |
| 1427 | for (i = 0; i < TREE_VEC_LENGTH (vec); ++i) |
| 1428 | if (same_type_p (elem, BINFO_TYPE (TREE_VEC_ELT (vec, i)))) |
| 1429 | return TREE_VEC_ELT (vec, i); |
| 1430 | |
| 1431 | return NULL_TREE; |
| 1432 | } |
| 1433 | |
| 1434 | /* Returns the namespace that contains DECL, whether directly or |
| 1435 | indirectly. */ |
| 1436 | |
| 1437 | tree |
| 1438 | decl_namespace_context (tree decl) |
| 1439 | { |
| 1440 | while (1) |
| 1441 | { |
| 1442 | if (TREE_CODE (decl) == NAMESPACE_DECL) |
| 1443 | return decl; |
| 1444 | else if (TYPE_P (decl)) |
| 1445 | decl = CP_DECL_CONTEXT (TYPE_MAIN_DECL (decl)); |
| 1446 | else |
| 1447 | decl = CP_DECL_CONTEXT (decl); |
| 1448 | } |
| 1449 | } |
| 1450 | |
| 1451 | /* Return truthvalue of whether T1 is the same tree structure as T2. |
| 1452 | Return 1 if they are the same. Return 0 if they are different. */ |
| 1453 | |
| 1454 | bool |
| 1455 | cp_tree_equal (tree t1, tree t2) |
| 1456 | { |
| 1457 | enum tree_code code1, code2; |
| 1458 | |
| 1459 | if (t1 == t2) |
| 1460 | return true; |
| 1461 | if (!t1 || !t2) |
| 1462 | return false; |
| 1463 | |
| 1464 | for (code1 = TREE_CODE (t1); |
| 1465 | code1 == NOP_EXPR || code1 == CONVERT_EXPR |
| 1466 | || code1 == NON_LVALUE_EXPR; |
| 1467 | code1 = TREE_CODE (t1)) |
| 1468 | t1 = TREE_OPERAND (t1, 0); |
| 1469 | for (code2 = TREE_CODE (t2); |
| 1470 | code2 == NOP_EXPR || code2 == CONVERT_EXPR |
| 1471 | || code1 == NON_LVALUE_EXPR; |
| 1472 | code2 = TREE_CODE (t2)) |
| 1473 | t2 = TREE_OPERAND (t2, 0); |
| 1474 | |
| 1475 | /* They might have become equal now. */ |
| 1476 | if (t1 == t2) |
| 1477 | return true; |
| 1478 | |
| 1479 | if (code1 != code2) |
| 1480 | return false; |
| 1481 | |
| 1482 | switch (code1) |
| 1483 | { |
| 1484 | case INTEGER_CST: |
| 1485 | return TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2) |
| 1486 | && TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2); |
| 1487 | |
| 1488 | case REAL_CST: |
| 1489 | return REAL_VALUES_EQUAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2)); |
| 1490 | |
| 1491 | case STRING_CST: |
| 1492 | return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2) |
| 1493 | && !memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2), |
| 1494 | TREE_STRING_LENGTH (t1)); |
| 1495 | |
| 1496 | case CONSTRUCTOR: |
| 1497 | /* We need to do this when determining whether or not two |
| 1498 | non-type pointer to member function template arguments |
| 1499 | are the same. */ |
| 1500 | if (!(same_type_p (TREE_TYPE (t1), TREE_TYPE (t2)) |
| 1501 | /* The first operand is RTL. */ |
| 1502 | && TREE_OPERAND (t1, 0) == TREE_OPERAND (t2, 0))) |
| 1503 | return false; |
| 1504 | return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)); |
| 1505 | |
| 1506 | case TREE_LIST: |
| 1507 | if (!cp_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))) |
| 1508 | return false; |
| 1509 | if (!cp_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2))) |
| 1510 | return false; |
| 1511 | return cp_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2)); |
| 1512 | |
| 1513 | case SAVE_EXPR: |
| 1514 | return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); |
| 1515 | |
| 1516 | case CALL_EXPR: |
| 1517 | if (!cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))) |
| 1518 | return false; |
| 1519 | return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)); |
| 1520 | |
| 1521 | case TARGET_EXPR: |
| 1522 | { |
| 1523 | tree o1 = TREE_OPERAND (t1, 0); |
| 1524 | tree o2 = TREE_OPERAND (t2, 0); |
| 1525 | |
| 1526 | /* Special case: if either target is an unallocated VAR_DECL, |
| 1527 | it means that it's going to be unified with whatever the |
| 1528 | TARGET_EXPR is really supposed to initialize, so treat it |
| 1529 | as being equivalent to anything. */ |
| 1530 | if (TREE_CODE (o1) == VAR_DECL && DECL_NAME (o1) == NULL_TREE |
| 1531 | && !DECL_RTL_SET_P (o1)) |
| 1532 | /*Nop*/; |
| 1533 | else if (TREE_CODE (o2) == VAR_DECL && DECL_NAME (o2) == NULL_TREE |
| 1534 | && !DECL_RTL_SET_P (o2)) |
| 1535 | /*Nop*/; |
| 1536 | else if (!cp_tree_equal (o1, o2)) |
| 1537 | return false; |
| 1538 | |
| 1539 | return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)); |
| 1540 | } |
| 1541 | |
| 1542 | case WITH_CLEANUP_EXPR: |
| 1543 | if (!cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))) |
| 1544 | return false; |
| 1545 | return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1)); |
| 1546 | |
| 1547 | case COMPONENT_REF: |
| 1548 | if (TREE_OPERAND (t1, 1) != TREE_OPERAND (t2, 1)) |
| 1549 | return false; |
| 1550 | return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); |
| 1551 | |
| 1552 | case VAR_DECL: |
| 1553 | case PARM_DECL: |
| 1554 | case CONST_DECL: |
| 1555 | case FUNCTION_DECL: |
| 1556 | case TEMPLATE_DECL: |
| 1557 | case IDENTIFIER_NODE: |
| 1558 | return false; |
| 1559 | |
| 1560 | case TEMPLATE_PARM_INDEX: |
| 1561 | return (TEMPLATE_PARM_IDX (t1) == TEMPLATE_PARM_IDX (t2) |
| 1562 | && TEMPLATE_PARM_LEVEL (t1) == TEMPLATE_PARM_LEVEL (t2) |
| 1563 | && same_type_p (TREE_TYPE (TEMPLATE_PARM_DECL (t1)), |
| 1564 | TREE_TYPE (TEMPLATE_PARM_DECL (t2)))); |
| 1565 | |
| 1566 | case TEMPLATE_ID_EXPR: |
| 1567 | { |
| 1568 | unsigned ix; |
| 1569 | tree vec1, vec2; |
| 1570 | |
| 1571 | if (!cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))) |
| 1572 | return false; |
| 1573 | vec1 = TREE_OPERAND (t1, 1); |
| 1574 | vec2 = TREE_OPERAND (t2, 1); |
| 1575 | |
| 1576 | if (!vec1 || !vec2) |
| 1577 | return !vec1 && !vec2; |
| 1578 | |
| 1579 | if (TREE_VEC_LENGTH (vec1) != TREE_VEC_LENGTH (vec2)) |
| 1580 | return false; |
| 1581 | |
| 1582 | for (ix = TREE_VEC_LENGTH (vec1); ix--;) |
| 1583 | if (!cp_tree_equal (TREE_VEC_ELT (vec1, ix), |
| 1584 | TREE_VEC_ELT (vec2, ix))) |
| 1585 | return false; |
| 1586 | |
| 1587 | return true; |
| 1588 | } |
| 1589 | |
| 1590 | case SIZEOF_EXPR: |
| 1591 | case ALIGNOF_EXPR: |
| 1592 | { |
| 1593 | tree o1 = TREE_OPERAND (t1, 0); |
| 1594 | tree o2 = TREE_OPERAND (t2, 0); |
| 1595 | |
| 1596 | if (TREE_CODE (o1) != TREE_CODE (o2)) |
| 1597 | return false; |
| 1598 | if (TYPE_P (o1)) |
| 1599 | return same_type_p (o1, o2); |
| 1600 | else |
| 1601 | return cp_tree_equal (o1, o2); |
| 1602 | } |
| 1603 | |
| 1604 | case PTRMEM_CST: |
| 1605 | /* Two pointer-to-members are the same if they point to the same |
| 1606 | field or function in the same class. */ |
| 1607 | if (PTRMEM_CST_MEMBER (t1) != PTRMEM_CST_MEMBER (t2)) |
| 1608 | return false; |
| 1609 | |
| 1610 | return same_type_p (PTRMEM_CST_CLASS (t1), PTRMEM_CST_CLASS (t2)); |
| 1611 | |
| 1612 | default: |
| 1613 | break; |
| 1614 | } |
| 1615 | |
| 1616 | switch (TREE_CODE_CLASS (code1)) |
| 1617 | { |
| 1618 | case '1': |
| 1619 | case '2': |
| 1620 | case '<': |
| 1621 | case 'e': |
| 1622 | case 'r': |
| 1623 | case 's': |
| 1624 | { |
| 1625 | int i; |
| 1626 | |
| 1627 | for (i = 0; i < TREE_CODE_LENGTH (code1); ++i) |
| 1628 | if (!cp_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i))) |
| 1629 | return false; |
| 1630 | |
| 1631 | return true; |
| 1632 | } |
| 1633 | |
| 1634 | case 't': |
| 1635 | return same_type_p (t1, t2); |
| 1636 | } |
| 1637 | |
| 1638 | my_friendly_assert (0, 20030617); |
| 1639 | return false; |
| 1640 | } |
| 1641 | |
| 1642 | /* Build a wrapper around a 'struct z_candidate' so we can use it as a |
| 1643 | tree. */ |
| 1644 | |
| 1645 | tree |
| 1646 | build_zc_wrapper (struct z_candidate* ptr) |
| 1647 | { |
| 1648 | tree t = make_node (WRAPPER); |
| 1649 | WRAPPER_ZC (t) = ptr; |
| 1650 | return t; |
| 1651 | } |
| 1652 | |
| 1653 | /* The type of ARG when used as an lvalue. */ |
| 1654 | |
| 1655 | tree |
| 1656 | lvalue_type (tree arg) |
| 1657 | { |
| 1658 | tree type = TREE_TYPE (arg); |
| 1659 | return type; |
| 1660 | } |
| 1661 | |
| 1662 | /* The type of ARG for printing error messages; denote lvalues with |
| 1663 | reference types. */ |
| 1664 | |
| 1665 | tree |
| 1666 | error_type (tree arg) |
| 1667 | { |
| 1668 | tree type = TREE_TYPE (arg); |
| 1669 | |
| 1670 | if (TREE_CODE (type) == ARRAY_TYPE) |
| 1671 | ; |
| 1672 | else if (TREE_CODE (type) == ERROR_MARK) |
| 1673 | ; |
| 1674 | else if (real_lvalue_p (arg)) |
| 1675 | type = build_reference_type (lvalue_type (arg)); |
| 1676 | else if (IS_AGGR_TYPE (type)) |
| 1677 | type = lvalue_type (arg); |
| 1678 | |
| 1679 | return type; |
| 1680 | } |
| 1681 | |
| 1682 | /* Does FUNCTION use a variable-length argument list? */ |
| 1683 | |
| 1684 | int |
| 1685 | varargs_function_p (tree function) |
| 1686 | { |
| 1687 | tree parm = TYPE_ARG_TYPES (TREE_TYPE (function)); |
| 1688 | for (; parm; parm = TREE_CHAIN (parm)) |
| 1689 | if (TREE_VALUE (parm) == void_type_node) |
| 1690 | return 0; |
| 1691 | return 1; |
| 1692 | } |
| 1693 | |
| 1694 | /* Returns 1 if decl is a member of a class. */ |
| 1695 | |
| 1696 | int |
| 1697 | member_p (tree decl) |
| 1698 | { |
| 1699 | const tree ctx = DECL_CONTEXT (decl); |
| 1700 | return (ctx && TYPE_P (ctx)); |
| 1701 | } |
| 1702 | |
| 1703 | /* Create a placeholder for member access where we don't actually have an |
| 1704 | object that the access is against. */ |
| 1705 | |
tree
build_dummy_object (tree type)
{
  /* Build "*(TYPE *) 0": a dereference of the null constant cast to a
     pointer to TYPE.  is_dummy_object recognizes exactly this shape.  */
  tree decl = build1 (NOP_EXPR, build_pointer_type (type), void_zero_node);
  return build_indirect_ref (decl, NULL);
}
| 1712 | |
| 1713 | /* We've gotten a reference to a member of TYPE. Return *this if appropriate, |
| 1714 | or a dummy object otherwise. If BINFOP is non-0, it is filled with the |
| 1715 | binfo path from current_class_type to TYPE, or 0. */ |
| 1716 | |
tree
maybe_dummy_object (tree type, tree* binfop)
{
  tree decl, context;
  tree binfo;

  /* Prefer resolving the access against the current class, if TYPE is
     one of its bases.  */
  if (current_class_type
      && (binfo = lookup_base (current_class_type, type,
			       ba_ignore | ba_quiet, NULL)))
    context = current_class_type;
  else
    {
      /* Reference from a nested class member function.  */
      context = type;
      binfo = TYPE_BINFO (type);
    }

  if (binfop)
    *binfop = binfo;

  if (current_class_ref && context == current_class_type
      /* Kludge: Make sure that current_class_type is actually
         correct.  It might not be if we're in the middle of
         tsubst_default_argument.  */
      && same_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref)),
		      current_class_type))
    decl = current_class_ref;
  else
    /* No usable `*this'; fall back to a placeholder object.  */
    decl = build_dummy_object (context);

  return decl;
}
| 1749 | |
| 1750 | /* Returns 1 if OB is a placeholder object, or a pointer to one. */ |
| 1751 | |
| 1752 | int |
| 1753 | is_dummy_object (tree ob) |
| 1754 | { |
| 1755 | if (TREE_CODE (ob) == INDIRECT_REF) |
| 1756 | ob = TREE_OPERAND (ob, 0); |
| 1757 | return (TREE_CODE (ob) == NOP_EXPR |
| 1758 | && TREE_OPERAND (ob, 0) == void_zero_node); |
| 1759 | } |
| 1760 | |
| 1761 | /* Returns 1 iff type T is a POD type, as defined in [basic.types]. */ |
| 1762 | |
| 1763 | int |
| 1764 | pod_type_p (tree t) |
| 1765 | { |
| 1766 | t = strip_array_types (t); |
| 1767 | |
| 1768 | if (t == error_mark_node) |
| 1769 | return 1; |
| 1770 | if (INTEGRAL_TYPE_P (t)) |
| 1771 | return 1; /* integral, character or enumeral type */ |
| 1772 | if (FLOAT_TYPE_P (t)) |
| 1773 | return 1; |
| 1774 | if (TYPE_PTR_P (t)) |
| 1775 | return 1; /* pointer to non-member */ |
| 1776 | if (TYPE_PTR_TO_MEMBER_P (t)) |
| 1777 | return 1; /* pointer to member */ |
| 1778 | |
| 1779 | if (! CLASS_TYPE_P (t)) |
| 1780 | return 0; /* other non-class type (reference or function) */ |
| 1781 | if (CLASSTYPE_NON_POD_P (t)) |
| 1782 | return 0; |
| 1783 | return 1; |
| 1784 | } |
| 1785 | |
| 1786 | /* Returns 1 iff zero initialization of type T means actually storing |
| 1787 | zeros in it. */ |
| 1788 | |
int
zero_init_p (tree t)
{
  /* Zero-initializability of an array is that of its element type.  */
  t = strip_array_types (t);

  if (t == error_mark_node)
    return 1;

  /* NULL pointers to data members are initialized with -1.  */
  if (TYPE_PTRMEM_P (t))
    return 0;

  /* Classes that contain types that can't be zero-initialized, cannot
     be zero-initialized themselves.  */
  if (CLASS_TYPE_P (t) && CLASSTYPE_NON_ZERO_INIT_P (t))
    return 0;

  return 1;
}
| 1808 | |
| 1809 | /* Table of valid C++ attributes. */ |
/* Table of valid C++ attributes.  */
const struct attribute_spec cxx_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "java_interface", 0, 0, false, false, false, handle_java_interface_attribute },
  { "com_interface", 0, 0, false, false, false, handle_com_interface_attribute },
  /* init_priority takes exactly one argument and applies to decls.  */
  { "init_priority",  1, 1, true,  false, false, handle_init_priority_attribute },
  /* Terminating sentinel required by the attribute machinery.  */
  { NULL,             0, 0, false, false, false, NULL }
};
| 1818 | |
| 1819 | /* Handle a "java_interface" attribute; arguments as in |
| 1820 | struct attribute_spec.handler. */ |
/* Handle a "java_interface" attribute; arguments as in
   struct attribute_spec.handler.  Marks *NODE, which must be a Java
   class type, as a Java interface.  */
static tree
handle_java_interface_attribute (tree* node,
				 tree name,
				 tree args ATTRIBUTE_UNUSED ,
				 int flags,
				 bool* no_add_attrs)
{
  /* Reject decls and non-Java class types.  */
  if (DECL_P (*node)
      || !CLASS_TYPE_P (*node)
      || !TYPE_FOR_JAVA (*node))
    {
      error ("`%s' attribute can only be applied to Java class definitions",
	     IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
      return NULL_TREE;
    }
  /* Copy the type first unless we may modify it in place.  */
  if (!(flags & (int) ATTR_FLAG_TYPE_IN_PLACE))
    *node = build_type_copy (*node);
  TYPE_JAVA_INTERFACE (*node) = 1;

  return NULL_TREE;
}
| 1843 | |
| 1844 | /* Handle a "com_interface" attribute; arguments as in |
| 1845 | struct attribute_spec.handler. */ |
| 1846 | static tree |
| 1847 | handle_com_interface_attribute (tree* node, |
| 1848 | tree name, |
| 1849 | tree args ATTRIBUTE_UNUSED , |
| 1850 | int flags ATTRIBUTE_UNUSED , |
| 1851 | bool* no_add_attrs) |
| 1852 | { |
| 1853 | static int warned; |
| 1854 | |
| 1855 | *no_add_attrs = true; |
| 1856 | |
| 1857 | if (DECL_P (*node) |
| 1858 | || !CLASS_TYPE_P (*node) |
| 1859 | || *node != TYPE_MAIN_VARIANT (*node)) |
| 1860 | { |
| 1861 | warning ("`%s' attribute can only be applied to class definitions", |
| 1862 | IDENTIFIER_POINTER (name)); |
| 1863 | return NULL_TREE; |
| 1864 | } |
| 1865 | |
| 1866 | if (!warned++) |
| 1867 | warning ("`%s' is obsolete; g++ vtables are now COM-compatible by default", |
| 1868 | IDENTIFIER_POINTER (name)); |
| 1869 | |
| 1870 | return NULL_TREE; |
| 1871 | } |
| 1872 | |
/* Handle an "init_priority" attribute; arguments as in
   struct attribute_spec.handler.  The single argument in ARGS is the
   requested priority; *NODE is the VAR_DECL it is attached to.  On any
   error, *NO_ADD_ATTRS is set and the attribute is discarded.  */
static tree
handle_init_priority_attribute (tree* node,
				tree name,
				tree args,
				int flags ATTRIBUTE_UNUSED ,
				bool* no_add_attrs)
{
  tree initp_expr = TREE_VALUE (args);
  tree decl = *node;
  tree type = TREE_TYPE (decl);
  int pri;

  /* Look through no-op conversions to find the priority constant.  */
  STRIP_NOPS (initp_expr);

  /* The argument must be an integer constant expression.  */
  if (!initp_expr || TREE_CODE (initp_expr) != INTEGER_CST)
    {
      error ("requested init_priority is not an integer constant");
      *no_add_attrs = true;
      return NULL_TREE;
    }

  pri = TREE_INT_CST_LOW (initp_expr);

  /* For an array of objects, the priority applies to the element
     type.  */
  type = strip_array_types (type);

  /* Only a namespace-scope definition of an object of class type may
     carry the attribute.  */
  if (decl == NULL_TREE
      || TREE_CODE (decl) != VAR_DECL
      || !TREE_STATIC (decl)
      || DECL_EXTERNAL (decl)
      || (TREE_CODE (type) != RECORD_TYPE
	  && TREE_CODE (type) != UNION_TYPE)
      /* Static objects in functions are initialized the
	 first time control passes through that
	 function. This is not precise enough to pin down an
	 init_priority value, so don't allow it.  */
      || current_function_decl)
    {
      error ("can only use `%s' attribute on file-scope definitions of objects of class type",
	     IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
      return NULL_TREE;
    }

  /* Valid priorities run from 1 to MAX_INIT_PRIORITY inclusive.  */
  if (pri > MAX_INIT_PRIORITY || pri <= 0)
    {
      error ("requested init_priority is out of range");
      *no_add_attrs = true;
      return NULL_TREE;
    }

  /* Check for init_priorities that are reserved for
     language and runtime support implementations.*/
  if (pri <= MAX_RESERVED_INIT_PRIORITY)
    {
      warning
	("requested init_priority is reserved for internal use");
    }

  if (SUPPORTS_INIT_PRIORITY)
    {
      /* Record the priority on the declaration for the back end.  */
      DECL_INIT_PRIORITY (decl) = pri;
      return NULL_TREE;
    }
  else
    {
      error ("`%s' attribute is not supported on this platform",
	     IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
      return NULL_TREE;
    }
}
| 1946 | |
| 1947 | /* Return a new PTRMEM_CST of the indicated TYPE. The MEMBER is the |
| 1948 | thing pointed to by the constant. */ |
| 1949 | |
| 1950 | tree |
| 1951 | make_ptrmem_cst (tree type, tree member) |
| 1952 | { |
| 1953 | tree ptrmem_cst = make_node (PTRMEM_CST); |
| 1954 | /* If would seem a great convenience if make_node would set |
| 1955 | TREE_CONSTANT for things of class `c', but it does not. */ |
| 1956 | TREE_CONSTANT (ptrmem_cst) = 1; |
| 1957 | TREE_TYPE (ptrmem_cst) = type; |
| 1958 | PTRMEM_CST_MEMBER (ptrmem_cst) = member; |
| 1959 | return ptrmem_cst; |
| 1960 | } |
| 1961 | |
| 1962 | /* Build a variant of TYPE that has the indicated ATTRIBUTES. May |
| 1963 | return an existing type of an appropriate type already exists. */ |
| 1964 | |
| 1965 | tree |
| 1966 | cp_build_type_attribute_variant (tree type, tree attributes) |
| 1967 | { |
| 1968 | tree new_type; |
| 1969 | |
| 1970 | new_type = build_type_attribute_variant (type, attributes); |
| 1971 | if (TREE_CODE (new_type) == FUNCTION_TYPE |
| 1972 | && (TYPE_RAISES_EXCEPTIONS (new_type) |
| 1973 | != TYPE_RAISES_EXCEPTIONS (type))) |
| 1974 | new_type = build_exception_variant (new_type, |
| 1975 | TYPE_RAISES_EXCEPTIONS (type)); |
| 1976 | return new_type; |
| 1977 | } |
| 1978 | |
/* Apply FUNC to all language-specific sub-trees of TP in a pre-order
   traversal.  Called from walk_tree().  WALK_SUBTREES_P is cleared for
   nodes whose children are fully handled here (or have none), so the
   generic walker does not visit them again.  HTAB is passed through to
   walk_tree unchanged.  Returns the first nonzero value produced by
   FUNC, or NULL_TREE to let the walk continue.  */

tree
cp_walk_subtrees (tree* tp,
                  int* walk_subtrees_p,
                  walk_tree_fn func,
                  void* data,
                  void* htab)
{
  enum tree_code code = TREE_CODE (*tp);
  tree result;

  /* Walk NODE and return immediately if FUNC found something.  */
#define WALK_SUBTREE(NODE) \
  do \
    { \
      result = walk_tree (&(NODE), func, data, htab); \
      if (result) \
	return result; \
    } \
  while (0)

  /* Not one of the easy cases.  We must explicitly go through the
     children.  */
  switch (code)
    {
    case DEFAULT_ARG:
    case TEMPLATE_TEMPLATE_PARM:
    case BOUND_TEMPLATE_TEMPLATE_PARM:
    case UNBOUND_CLASS_TEMPLATE:
    case TEMPLATE_PARM_INDEX:
    case TEMPLATE_TYPE_PARM:
    case TYPENAME_TYPE:
    case TYPEOF_TYPE:
    case BASELINK:
      /* None of these have subtrees other than those already walked
         above.  */
      *walk_subtrees_p = 0;
      break;

    case PTRMEM_CST:
      /* Walk the type; the member itself is not a subtree.  */
      WALK_SUBTREE (TREE_TYPE (*tp));
      *walk_subtrees_p = 0;
      break;

    case TREE_LIST:
      /* The generic walker does not visit TREE_PURPOSE; do it here and
	 then let it handle the rest of the list.  */
      WALK_SUBTREE (TREE_PURPOSE (*tp));
      break;

    case OVERLOAD:
      WALK_SUBTREE (OVL_FUNCTION (*tp));
      WALK_SUBTREE (OVL_CHAIN (*tp));
      *walk_subtrees_p = 0;
      break;

    case RECORD_TYPE:
      /* A pointer-to-member-function type hides a FUNCTION_TYPE that
	 must be walked explicitly.  */
      if (TYPE_PTRMEMFUNC_P (*tp))
	WALK_SUBTREE (TYPE_PTRMEMFUNC_FN_TYPE (*tp));
      break;

    default:
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE
}
| 2048 | |
/* Decide whether there are language-specific reasons to not inline a
   function as a tree.  Returns nonzero if *FNP must not be inlined.
   NOTE: may replace *FNP with a freshly instantiated declaration as a
   side effect.  */

int
cp_cannot_inline_tree_fn (tree* fnp)
{
  tree fn = *fnp;

  /* We can inline a template instantiation only if it's fully
     instantiated.  */
  if (DECL_TEMPLATE_INFO (fn)
      && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
    {
      /* Don't instantiate functions that are not going to be
	 inlined.  */
      if (!DECL_INLINE (DECL_TEMPLATE_RESULT
			(template_for_substitution (fn))))
	return 1;

      /* Instantiate now; note that this updates *FNP for the
	 caller.  */
      fn = *fnp = instantiate_decl (fn, /*defer_ok=*/0);

      /* If the instantiation is still pending, we cannot inline.  */
      if (TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
	return 1;
    }

  /* With flag_really_no_inline, only functions marked "always_inline"
     may be inlined.  */
  if (flag_really_no_inline
      && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)) == NULL)
    return 1;

  /* Don't auto-inline anything that might not be bound within
     this unit of translation.  */
  if (!DECL_DECLARED_INLINE_P (fn) && !(*targetm.binds_local_p) (fn))
    {
      DECL_UNINLINABLE (fn) = 1;
      return 1;
    }

  /* Varargs functions cannot be inlined as trees.  */
  if (varargs_function_p (fn))
    {
      DECL_UNINLINABLE (fn) = 1;
      return 1;
    }

  /* Attributes on the function (e.g. machine-specific ones) may also
     forbid inlining; ask the generic code.  */
  if (! function_attribute_inlinable_p (fn))
    {
      DECL_UNINLINABLE (fn) = 1;
      return 1;
    }

  return 0;
}
| 2100 | |
| 2101 | /* Add any pending functions other than the current function (already |
| 2102 | handled by the caller), that thus cannot be inlined, to FNS_P, then |
| 2103 | return the latest function added to the array, PREV_FN. */ |
| 2104 | |
| 2105 | tree |
| 2106 | cp_add_pending_fn_decls (void* fns_p, tree prev_fn) |
| 2107 | { |
| 2108 | varray_type *fnsp = (varray_type *)fns_p; |
| 2109 | struct saved_scope *s; |
| 2110 | |
| 2111 | for (s = scope_chain; s; s = s->prev) |
| 2112 | if (s->function_decl && s->function_decl != prev_fn) |
| 2113 | { |
| 2114 | VARRAY_PUSH_TREE (*fnsp, s->function_decl); |
| 2115 | prev_fn = s->function_decl; |
| 2116 | } |
| 2117 | |
| 2118 | return prev_fn; |
| 2119 | } |
| 2120 | |
| 2121 | /* Determine whether a tree node is an OVERLOAD node. Used to decide |
| 2122 | whether to copy a node or to preserve its chain when inlining a |
| 2123 | function. */ |
| 2124 | |
| 2125 | int |
| 2126 | cp_is_overload_p (tree t) |
| 2127 | { |
| 2128 | return TREE_CODE (t) == OVERLOAD; |
| 2129 | } |
| 2130 | |
| 2131 | /* Determine whether VAR is a declaration of an automatic variable in |
| 2132 | function FN. */ |
| 2133 | |
| 2134 | int |
| 2135 | cp_auto_var_in_fn_p (tree var, tree fn) |
| 2136 | { |
| 2137 | return (DECL_P (var) && DECL_CONTEXT (var) == fn |
| 2138 | && nonstatic_local_decl_p (var)); |
| 2139 | } |
| 2140 | |
/* Tell whether a declaration is needed for the RESULT of a function
   FN being inlined into CALLER or if the top node of target_exprs is
   to be used.  DECL_MAP_ is the caller's splay tree mapping the
   inlinee's declarations to their replacements; *NEED_DECL is set to
   whether the caller must emit a declaration for the returned VAR.
   RETURN_SLOT_ADDR, if non-NULL, is the address of the caller-supplied
   return slot.  */

tree
cp_copy_res_decl_for_inlining (tree result,
                               tree fn,
                               tree caller,
                               void* decl_map_,
                               int* need_decl,
                               tree return_slot_addr)
{
  splay_tree decl_map = (splay_tree)decl_map_;
  tree var;

  /* If FN returns an aggregate then the caller will always pass the
     address of the return slot explicitly.  If we were just to
     create a new VAR_DECL here, then the result of this function
     would be copied (bitwise) into the variable initialized by the
     TARGET_EXPR.  That's incorrect, so we must transform any
     references to the RESULT into references to the target.  */

  /* We should have an explicit return slot iff the return type is
     TREE_ADDRESSABLE.  See simplify_aggr_init_expr.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (result))
      != (return_slot_addr != NULL_TREE))
    abort ();

  /* No separate declaration is needed when the return slot is used.  */
  *need_decl = !return_slot_addr;
  if (return_slot_addr)
    {
      /* Use the slot itself; the types must agree modulo top-level
	 cv-qualifiers.  */
      var = build_indirect_ref (return_slot_addr, "");
      if (! same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (var),
						       TREE_TYPE (result)))
	abort ();
    }
  /* Otherwise, make an appropriate copy.  */
  else
    var = copy_decl_for_inlining (result, fn, caller);

  if (DECL_SAVED_FUNCTION_DATA (fn))
    {
      tree nrv = DECL_SAVED_FUNCTION_DATA (fn)->x_return_value;
      if (nrv)
	{
	  /* We have a named return value; copy the name and source
	     position so we can get reasonable debugging information, and
	     register the return variable as its equivalent.  */
	  if (TREE_CODE (var) == VAR_DECL
	      /* But not if we're initializing a variable from the
		 enclosing function which already has its own name.  */
	      && DECL_NAME (var) == NULL_TREE)
	    {
	      DECL_NAME (var) = DECL_NAME (nrv);
	      DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (nrv);
	      DECL_ABSTRACT_ORIGIN (var) = DECL_ORIGIN (nrv);
	      /* Don't lose initialization info.  */
	      DECL_INITIAL (var) = DECL_INITIAL (nrv);
	      /* Don't forget that it needs to go in the stack.  */
	      TREE_ADDRESSABLE (var) = TREE_ADDRESSABLE (nrv);
	    }

	  /* Map the named return value to VAR so uses of it in the
	     inlined body are rewritten.  */
	  splay_tree_insert (decl_map,
			     (splay_tree_key) nrv,
			     (splay_tree_value) var);
	}
    }

  return var;
}
| 2211 | |
/* Initialize tree.c.  */

void
init_tree (void)
{
  /* Create the GC-managed hash table used to share TREE_LIST nodes;
     31 is just an initial size estimate.  */
  list_hash_table = htab_create_ggc (31, list_hash, list_hash_eq, NULL);
}
| 2219 | |
| 2220 | /* Called via walk_tree. If *TP points to a DECL_STMT for a local |
| 2221 | declaration, copies the declaration and enters it in the splay_tree |
| 2222 | pointed to by DATA (which is really a `splay_tree *'). */ |
| 2223 | |
| 2224 | static tree |
| 2225 | mark_local_for_remap_r (tree* tp, |
| 2226 | int* walk_subtrees ATTRIBUTE_UNUSED , |
| 2227 | void* data) |
| 2228 | { |
| 2229 | tree t = *tp; |
| 2230 | splay_tree st = (splay_tree) data; |
| 2231 | tree decl; |
| 2232 | |
| 2233 | |
| 2234 | if (TREE_CODE (t) == DECL_STMT |
| 2235 | && nonstatic_local_decl_p (DECL_STMT_DECL (t))) |
| 2236 | decl = DECL_STMT_DECL (t); |
| 2237 | else if (TREE_CODE (t) == LABEL_STMT) |
| 2238 | decl = LABEL_STMT_LABEL (t); |
| 2239 | else if (TREE_CODE (t) == TARGET_EXPR |
| 2240 | && nonstatic_local_decl_p (TREE_OPERAND (t, 0))) |
| 2241 | decl = TREE_OPERAND (t, 0); |
| 2242 | else if (TREE_CODE (t) == CASE_LABEL) |
| 2243 | decl = CASE_LABEL_DECL (t); |
| 2244 | else |
| 2245 | decl = NULL_TREE; |
| 2246 | |
| 2247 | if (decl) |
| 2248 | { |
| 2249 | tree copy; |
| 2250 | |
| 2251 | /* Make a copy. */ |
| 2252 | copy = copy_decl_for_inlining (decl, |
| 2253 | DECL_CONTEXT (decl), |
| 2254 | DECL_CONTEXT (decl)); |
| 2255 | |
| 2256 | /* Remember the copy. */ |
| 2257 | splay_tree_insert (st, |
| 2258 | (splay_tree_key) decl, |
| 2259 | (splay_tree_value) copy); |
| 2260 | } |
| 2261 | |
| 2262 | return NULL_TREE; |
| 2263 | } |
| 2264 | |
| 2265 | /* Called via walk_tree when an expression is unsaved. Using the |
| 2266 | splay_tree pointed to by ST (which is really a `splay_tree'), |
| 2267 | remaps all local declarations to appropriate replacements. */ |
| 2268 | |
| 2269 | static tree |
| 2270 | cp_unsave_r (tree* tp, |
| 2271 | int* walk_subtrees, |
| 2272 | void* data) |
| 2273 | { |
| 2274 | splay_tree st = (splay_tree) data; |
| 2275 | splay_tree_node n; |
| 2276 | |
| 2277 | /* Only a local declaration (variable or label). */ |
| 2278 | if (nonstatic_local_decl_p (*tp)) |
| 2279 | { |
| 2280 | /* Lookup the declaration. */ |
| 2281 | n = splay_tree_lookup (st, (splay_tree_key) *tp); |
| 2282 | |
| 2283 | /* If it's there, remap it. */ |
| 2284 | if (n) |
| 2285 | *tp = (tree) n->value; |
| 2286 | } |
| 2287 | else if (TREE_CODE (*tp) == SAVE_EXPR) |
| 2288 | remap_save_expr (tp, st, current_function_decl, walk_subtrees); |
| 2289 | else |
| 2290 | { |
| 2291 | copy_tree_r (tp, walk_subtrees, NULL); |
| 2292 | |
| 2293 | /* Do whatever unsaving is required. */ |
| 2294 | unsave_expr_1 (*tp); |
| 2295 | } |
| 2296 | |
| 2297 | /* Keep iterating. */ |
| 2298 | return NULL_TREE; |
| 2299 | } |
| 2300 | |
| 2301 | /* Called whenever an expression needs to be unsaved. */ |
| 2302 | |
| 2303 | tree |
| 2304 | cxx_unsave_expr_now (tree tp) |
| 2305 | { |
| 2306 | splay_tree st; |
| 2307 | |
| 2308 | /* Create a splay-tree to map old local variable declarations to new |
| 2309 | ones. */ |
| 2310 | st = splay_tree_new (splay_tree_compare_pointers, NULL, NULL); |
| 2311 | |
| 2312 | /* Walk the tree once figuring out what needs to be remapped. */ |
| 2313 | walk_tree (&tp, mark_local_for_remap_r, st, NULL); |
| 2314 | |
| 2315 | /* Walk the tree again, copying, remapping, and unsaving. */ |
| 2316 | walk_tree (&tp, cp_unsave_r, st, NULL); |
| 2317 | |
| 2318 | /* Clean up. */ |
| 2319 | splay_tree_delete (st); |
| 2320 | |
| 2321 | return tp; |
| 2322 | } |
| 2323 | |
/* Returns the kind of special function that DECL (a FUNCTION_DECL)
   is.  Note that sfk_none is zero, so this function can be used as a
   predicate to test whether or not DECL is a special function.  */

special_function_kind
special_function_p (tree decl)
{
  /* Rather than doing all this stuff with magic names, we should
     probably have a field of type `special_function_kind' in
     DECL_LANG_SPECIFIC.  */
  /* The tests are ordered so that the more specific kind is checked
     first, e.g. the copy-constructor check precedes the plain
     constructor check.  */
  if (DECL_COPY_CONSTRUCTOR_P (decl))
    return sfk_copy_constructor;
  if (DECL_CONSTRUCTOR_P (decl))
    return sfk_constructor;
  if (DECL_OVERLOADED_OPERATOR_P (decl) == NOP_EXPR)
    return sfk_assignment_operator;
  if (DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (decl))
    return sfk_destructor;
  if (DECL_COMPLETE_DESTRUCTOR_P (decl))
    return sfk_complete_destructor;
  if (DECL_BASE_DESTRUCTOR_P (decl))
    return sfk_base_destructor;
  if (DECL_DELETING_DESTRUCTOR_P (decl))
    return sfk_deleting_destructor;
  if (DECL_CONV_FN_P (decl))
    return sfk_conversion;

  return sfk_none;
}
| 2353 | |
| 2354 | /* Returns true if and only if NODE is a name, i.e., a node created |
| 2355 | by the parser when processing an id-expression. */ |
| 2356 | |
| 2357 | bool |
| 2358 | name_p (tree node) |
| 2359 | { |
| 2360 | if (TREE_CODE (node) == TEMPLATE_ID_EXPR) |
| 2361 | node = TREE_OPERAND (node, 0); |
| 2362 | return (/* An ordinary unqualified name. */ |
| 2363 | TREE_CODE (node) == IDENTIFIER_NODE |
| 2364 | /* A destructor name. */ |
| 2365 | || TREE_CODE (node) == BIT_NOT_EXPR |
| 2366 | /* A qualified name. */ |
| 2367 | || TREE_CODE (node) == SCOPE_REF); |
| 2368 | } |
| 2369 | |
| 2370 | /* Returns nonzero if TYPE is a character type, including wchar_t. */ |
| 2371 | |
| 2372 | int |
| 2373 | char_type_p (tree type) |
| 2374 | { |
| 2375 | return (same_type_p (type, char_type_node) |
| 2376 | || same_type_p (type, unsigned_char_type_node) |
| 2377 | || same_type_p (type, signed_char_type_node) |
| 2378 | || same_type_p (type, wchar_type_node)); |
| 2379 | } |
| 2380 | |
/* Returns the kind of linkage associated with the indicated DECL.  The
   value returned is as specified by the language standard; it is
   independent of implementation details regarding template
   instantiation, etc.  For example, it is possible that a declaration
   to which this function assigns external linkage would not show up
   as a global symbol when you run `nm' on the resulting object file.  */

linkage_kind
decl_linkage (tree decl)
{
  /* This function doesn't attempt to calculate the linkage from first
     principles as given in [basic.link].  Instead, it makes use of
     the fact that we have already set TREE_PUBLIC appropriately, and
     then handles a few special cases.  Ideally, we would calculate
     linkage first, and then transform that into a concrete
     implementation.  */

  /* Things that don't have names have no linkage.  */
  if (!DECL_NAME (decl))
    return lk_none;

  /* Things that are TREE_PUBLIC have external linkage.  */
  if (TREE_PUBLIC (decl))
    return lk_external;

  /* Some things that are not TREE_PUBLIC have external linkage, too.
     For example, on targets that don't have weak symbols, we make all
     template instantiations have internal linkage (in the object
     file), but the symbols should still be treated as having external
     linkage from the point of view of the language.  */
  if (DECL_LANG_SPECIFIC (decl) && DECL_COMDAT (decl))
    return lk_external;

  /* Things in local scope do not have linkage, if they don't have
     TREE_PUBLIC set.  */
  if (decl_function_context (decl))
    return lk_none;

  /* Everything else has internal linkage.  */
  return lk_internal;
}
| 2422 | \f |
| 2423 | /* EXP is an expression that we want to pre-evaluate. Returns via INITP an |
| 2424 | expression to perform the pre-evaluation, and returns directly an |
| 2425 | expression to use the precalculated result. */ |
| 2426 | |
| 2427 | tree |
| 2428 | stabilize_expr (tree exp, tree* initp) |
| 2429 | { |
| 2430 | tree init_expr; |
| 2431 | |
| 2432 | if (!TREE_SIDE_EFFECTS (exp)) |
| 2433 | { |
| 2434 | init_expr = NULL_TREE; |
| 2435 | } |
| 2436 | else if (!real_lvalue_p (exp) |
| 2437 | || !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (exp))) |
| 2438 | { |
| 2439 | init_expr = get_target_expr (exp); |
| 2440 | exp = TARGET_EXPR_SLOT (init_expr); |
| 2441 | } |
| 2442 | else |
| 2443 | { |
| 2444 | exp = build_unary_op (ADDR_EXPR, exp, 1); |
| 2445 | init_expr = get_target_expr (exp); |
| 2446 | exp = TARGET_EXPR_SLOT (init_expr); |
| 2447 | exp = build_indirect_ref (exp, 0); |
| 2448 | } |
| 2449 | |
| 2450 | *initp = init_expr; |
| 2451 | return exp; |
| 2452 | } |
| 2453 | |
| 2454 | /* Like stabilize_expr, but for a call whose args we want to |
| 2455 | pre-evaluate. */ |
| 2456 | |
| 2457 | void |
| 2458 | stabilize_call (tree call, tree *initp) |
| 2459 | { |
| 2460 | tree inits = NULL_TREE; |
| 2461 | tree t; |
| 2462 | |
| 2463 | if (call == error_mark_node) |
| 2464 | return; |
| 2465 | |
| 2466 | if (TREE_CODE (call) != CALL_EXPR |
| 2467 | && TREE_CODE (call) != AGGR_INIT_EXPR) |
| 2468 | abort (); |
| 2469 | |
| 2470 | for (t = TREE_OPERAND (call, 1); t; t = TREE_CHAIN (t)) |
| 2471 | if (TREE_SIDE_EFFECTS (TREE_VALUE (t))) |
| 2472 | { |
| 2473 | tree init; |
| 2474 | TREE_VALUE (t) = stabilize_expr (TREE_VALUE (t), &init); |
| 2475 | if (!init) |
| 2476 | /* Nothing. */; |
| 2477 | else if (inits) |
| 2478 | inits = build (COMPOUND_EXPR, void_type_node, inits, init); |
| 2479 | else |
| 2480 | inits = init; |
| 2481 | } |
| 2482 | |
| 2483 | *initp = inits; |
| 2484 | } |
| 2485 | |
/* Like stabilize_expr, but for an initialization.  If we are initializing
   an object of class type, we don't want to introduce an extra temporary,
   so we look past the TARGET_EXPR and stabilize the arguments of the call
   instead.  Returns true if the initialization was fully stabilized;
   false if pre-evaluation was impossible (e.g. a COND_EXPR initializer).  */

bool
stabilize_init (tree init, tree *initp)
{
  tree t = init;

  if (t == error_mark_node)
    return true;

  /* A scalar INIT_EXPR: stabilize the right-hand side directly.  */
  if (TREE_CODE (t) == INIT_EXPR
      && TREE_CODE (TREE_OPERAND (t, 1)) != TARGET_EXPR)
    TREE_OPERAND (t, 1) = stabilize_expr (TREE_OPERAND (t, 1), initp);
  else
    {
      /* Look through the INIT_EXPR and TARGET_EXPR to the underlying
	 initializer.  */
      if (TREE_CODE (t) == INIT_EXPR)
	t = TREE_OPERAND (t, 1);
      if (TREE_CODE (t) == TARGET_EXPR)
	t = TARGET_EXPR_INITIAL (t);
      if (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (t) == NULL_TREE)
	{
	  /* Default-initialization.  */
	  *initp = NULL_TREE;
	  return true;
	}

      /* If the initializer is a COND_EXPR, we can't preevaluate
	 anything.  */
      if (TREE_CODE (t) == COND_EXPR)
	return false;

      /* Otherwise the initializer is a call; stabilize its
	 arguments.  */
      stabilize_call (t, initp);
    }

  return true;
}
| 2526 | |
| 2527 | \f |
| 2528 | #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007) |
/* Complain that some language-specific thing hanging off a tree
   node has been accessed improperly.  Called from the LANG_CHECK
   tree-checking macros; does not return.  */

void
lang_check_failed (const char* file, int line, const char* function)
{
  internal_error ("lang_* check: failed in %s, at %s:%d",
		  function, trim_filename (file), line);
}
| 2538 | #endif /* ENABLE_TREE_CHECKING */ |
| 2539 | |
| 2540 | #include "gt-cp-tree.h" |