2 * RESOLVE.C - Resolve the parser tree and prepare for code generation or
5 * (c)Copyright 1993-2016, Matthew Dillon, All Rights Reserved. See the
6 * COPYRIGHT file at the base of the distribution.
8 * Pass1 - ResolveClasses() - Handles superclass/subclass merging for the
11 * Pass2 - Resolve*() - Resolves identifiers and identifier paths, plus
12 * the size and alignment for Types, Decls, and SemGroups.
14 * Utilizes a deferred work mechanic to avoid circular loops. This mechanism
15 * allows types to be partially resolved (enough to satisfy the caller), then
16 * finishes up via the deferred work queue.
/*
 * Forward declarations for the resolver.
 *
 * Pass-1 entry points (class merge) and pass-2 entry points (identifier,
 * type, alignment and storage resolution).  The capitalized Resolve*()
 * names are the top-level pass drivers; the lower-case resolve*() helpers
 * operate on individual expressions, declarations and types.
 */
24 static void ResolveClasses(Stmt *st, int flags);
25 static void ResolveAlignment(Stmt *st, int flags);
26 static void ResolveStorage(Stmt *st, int flags);
27 static void ResolveSemGroup(SemGroup *sg, int retry);
28 static void errorDottedId(runeid_t *ary, const char *ctl,...);
30 static void ResolveStmt(SemGroup *isg, Stmt *st, int flags);
31 static Type *ResolveType(Type *type, struct ResVis *vis, int retry);
32 static void ResolveDecl(Declaration *d, int retry);
33 static Exp *ResolveExp(SemGroup *isg, SemGroup *sg,
34 Exp *exp, Type *itype, int flags);
/* Expression-level helpers (return possibly-replaced Exp trees). */
36 static Type *resolveReturnType(SemGroup *sg, int flags);
37 static Type *resolveArgsType(SemGroup *sg, int flags);
38 static Exp *resolveConstExp(SemGroup *isg, SemGroup *sg, Exp *exp, int flags);
39 static Exp *resolveConstExpBool(SemGroup *isg, SemGroup *sg, Exp *exp,
40 int flags, TmpData *ts);
41 static Exp *resolveCompoundExp(SemGroup *isg, SemGroup *sg,
42 Exp *exp, Type *itype, int flags);
43 static Exp *resolveBracketedExp(SemGroup *isg, SemGroup *sg,
44 Exp *exp, Type *itype, int flags);
45 static Exp *resolveExpCast(SemGroup *isg, SemGroup *sg,
46 Exp *exp, Type *ltype, int flags);
47 static Exp *resolveExpOper(SemGroup *isg, SemGroup *sg,
48 Exp *exp, Type *itype, int flags);
49 static void resolveSuperClass(Type *super);
/* Alignment-resolution helpers (fill in *expalignp as a mask). */
51 static void resolveDeclAlign(Declaration *d, urunesize_t *expalignp, int flags);
52 static void resolveExpAlign(Exp *exp, urunesize_t *expalignp, int flags);
53 static void resolveTypeAlign(Type *type, urunesize_t *expalignp, int flags);
54 static void resolveSemGroupAlign(SemGroup *sg, int flags);
/*
 * Storage-resolution helpers.  'base' is the current offset, *limitp is
 * raised to the high-water mark; the g-prefixed pair tracks global storage.
 */
56 static void resolveDeclStorage(Declaration *d,
57 urunesize_t base, urunesize_t *limitp,
58 urunesize_t gbase, urunesize_t *glimitp);
59 static void resolveStorageExpOnly(Exp *exp,
60 urunesize_t base, urunesize_t *limitp);
61 static void resolveStorageExpSub(Exp *exp,
62 urunesize_t base, urunesize_t *limitp);
63 static void resolveStorageExp(Exp *exp,
64 urunesize_t base, urunesize_t *limitp);
/* Operator / cast lookup against resolved types. */
66 static Declaration *findOper(Type *btype, runeid_t id,
67 Type *ltype, Type *rtype, int flags);
68 static Declaration *findExpOper(Exp *exp, int flags);
69 static Declaration *findCast(Type *btype, Type *ltype, Type *rtype, int flags);
70 static void resolveStorageType(Type *type, int isglob,
71 urunesize_t base, urunesize_t *limitp);
72 static void resolveStorageSemGroup(SemGroup *sg,
73 urunesize_t base, urunesize_t *limitp,
74 urunesize_t gbase, urunesize_t *glimitp);
75 static void methodCheckThisId(Type *type, Exp *exp);
/* Procedure specialization: inlining and dynamic (method-call) dispatch. */
77 static void resolveProcedureInline(SemGroup *isg, SemGroup *sg,
79 static void resolveDynamicProcedure(SemGroup *isg, SemGroup *sg,
81 static void resolveDynamicProcedureAlign(Exp *exp,
82 urunesize_t *expalignp, int flags);
83 static void resolveDynamicProcedureStorage(Exp *exp,
84 urunesize_t base, urunesize_t *limitp,
85 urunesize_t gbase, urunesize_t *glimitp);
87 static int SpecialSemGroupGet(runeid_t id);
88 static void ResolveMethodProcedureThisArg(SemGroup *sg, Declaration *d);
91 * Adjust type to be lvalue but do not modify its relative context for
/*
 * ADD_LVALUE()/DEL_LVALUE() - re-resolve a type with the SF_LVALUE
 * storage qualifier added/removed (ResolveType() with no visibility
 * tracking and retry 0).
 */
94 #define ADD_LVALUE(type) \
95 ResolveType(AddTypeQual((type), SF_LVALUE), NULL, 0)
96 #define DEL_LVALUE(type) \
97 ResolveType(DelTypeQual((type), SF_LVALUE), NULL, 0)
/* Flags passed down through the Resolve*()/resolve*() call chain. */
99 #define RESOLVE_AUTOCAST 0x0001 /* autocast to expected type */
100 #define RESOLVE_CONSTEXP 0x0002 /* resolve for const interpretation */
101 #define RESOLVE_CLEAN 0x0004 /* cleanup after const interp */
102 #define RESOLVE_FAILOK 0x0008 /* failure to resolve is acceptable (name implies; original comment was copy-pasted from RESOLVE_CLEAN) */
/*
 * BASEALIGN() - round 'base' up to the alignment described by 'alignmask'
 * (a power-of-two minus one, e.g. 7 for 8-byte alignment).
 *
 * Fix: 'alignmask' is now parenthesized in the addition.  The original
 * expansion '(base) + alignmask' mis-associated when the argument was an
 * expression of lower precedence than '+' (e.g. BASEALIGN(x, m | n)).
 */
#define BASEALIGN(base, alignmask) \
	(((base) + (alignmask)) & ~(urunesize_t)(alignmask))

/*
 * SIZELIMIT() - raise *limitp to at least base+bytes (high-water mark).
 *
 * Fix: wrapped in do { } while (0) so the macro expands to a single
 * statement.  The original bare 'if' could capture a following 'else'
 * when the macro was used unbraced inside an if/else.
 */
#define SIZELIMIT(base, bytes, limitp) \
	do { \
		if ((base) + (bytes) > *(limitp)) \
			*(limitp) = ((base) + (bytes)); \
	} while (0)
112 * Deferred work queue
/*
 * The deferred-work queue lets the resolver postpone items that would
 * otherwise create circular resolution loops; they are retried later by
 * runDeferredWork().  ResDeferBase/ResDeferTail form a singly-linked
 * FIFO (tail-pointer append).
 */
114 typedef Type * type_p;
117 typedef struct ResVis {
/* NOTE(review): ResVis/ResDefer member lists are elided in this view. */
122 typedef struct ResDefer {
123 struct ResDefer *next;
/* Discriminator values for the union payload (see res->which switch). */
125 RES_STMT, RES_DECL, RES_TYPE, RES_EXP, RES_SEMGROUP
153 static resdelay_t *ResDeferBase;
154 static resdelay_t **ResDeferTail = &ResDeferBase;
/* Inline-complexity budget used by the procedure-inlining pass. */
157 int RuneInlineComplexity = 20;
160 * Do a pass on all deferred work. Returns non-zero if there is more
161 * deferred work after the pass is complete.
165 runDeferredWork(void)
/* Snapshot the tail so this pass stops at items queued before it began. */
168 resdelay_t **last = ResDeferTail;
/* Pop entries FIFO; repair the tail pointer when the list drains. */
172 while ((res = ResDeferBase) != NULL) {
173 if ((ResDeferBase = res->next) == NULL)
174 ResDeferTail = &ResDeferBase;
/* Dispatch on the queued work kind; all retries pass retry=1. */
175 switch (res->which) {
177 ResolveStmt(res->stmt.isg,
182 ResolveDecl(res->decl.d, 1);
185 type = ResolveType(res->type.type, NULL, 1);
/* Retried resolves must resolve in place, never substitute the node. */
186 dassert(type == res->type.type);
189 exp = ResolveExp(res->exp.isg, res->exp.sg,
190 res->exp.exp, res->exp.itype,
192 dassert(exp == res->exp.exp);
195 ResolveSemGroup(res->sg.sg, 1);
201 zfree(res, sizeof(*res));
202 if (&res->next == last) /* storage freed, ok to test ptr */
/* Non-zero means work remains (items re-queued during this pass). */
205 return (ResDeferBase != NULL);
/*
 * deferStmt() - queue a statement for a later resolver retry pass
 * (processed by runDeferredWork()).
 */
211 deferStmt(SemGroup *isg, Stmt *st, int flags)
215 res = zalloc(sizeof(*res));
216 res->which = RES_STMT;
219 res->stmt.flags = flags;
/* Append at the tail to preserve FIFO ordering. */
221 ResDeferTail = &res->next;
/*
 * deferDecl() - queue a declaration for a later resolver retry pass.
 */
227 deferDecl(Declaration *d)
231 res = zalloc(sizeof(*res));
232 res->which = RES_DECL;
/* Append at the tail to preserve FIFO ordering. */
235 ResDeferTail = &res->next;
/*
 * deferExp() - queue an expression (with its inferred/expected type and
 * resolve flags) for a later resolver retry pass.
 */
241 deferExp(SemGroup *isg, SemGroup *sg, Exp *exp, Type *itype, int flags)
245 res = zalloc(sizeof(*res));
246 res->which = RES_EXP;
250 res->exp.itype = itype;
251 res->exp.flags = flags;
/* Append at the tail to preserve FIFO ordering. */
253 ResDeferTail = &res->next;
257 * Note that visibility is set immediately by the call chain, NOT in any
/*
 * deferType() - queue a type for a later resolver retry pass.  No ResVis
 * is stored; visibility was already handled by the caller (see above).
 */
262 deferType(Type *type)
266 res = zalloc(sizeof(*res));
267 res->which = RES_TYPE;
268 res->type.type = type;
/* Append at the tail to preserve FIFO ordering. */
270 ResDeferTail = &res->next;
/*
 * deferSG() - queue a semantic group for a later resolver retry pass.
 */
276 deferSG(SemGroup *sg)
280 res = zalloc(sizeof(*res));
281 res->which = RES_SEMGROUP;
/* Append at the tail to preserve FIFO ordering. */
284 ResDeferTail = &res->next;
/*
 * ResolveProject() - top-level resolver driver for a whole project.
 *
 * Runs the class-merge pass, resolves the built-in base types, locates
 * main() in the top-level module, drains the deferred-work queue, then
 * runs the alignment and storage passes and marks the parse resolved.
 */
288 ResolveProject(Parse *p, Stmt *st)
296 dassert_stmt(st, st->st_Op == ST_Import);
299 * Interpreter or Generator may reference our global internal types
300 * directly, so make sure they are all resolved.
302 ResolveClasses(st, 0);
303 for (i = 0; BaseTypeAry[i]; ++i)
304 ResolveType(BaseTypeAry[i], NULL, 0);
/* The top-level module must provide a main() entry point. */
307 main_st = RUNE_FIRST(&st->st_List);
308 d = FindDeclId(main_st->st_MyGroup, id, &eno);
311 fprintf(stderr, "Top-level module missing main()\n");
/* Iterate deferred passes until no work remains. */
324 while (runDeferredWork())
328 * Resolve all types registered by DLLs
333 RUNE_FOREACH(tr, &TypeRegList, tr_Node)
334 ResolveType(tr->tr_Type, NULL, 0);
/* Final passes: alignment, then storage offsets. */
337 ResolveAlignment(st, 0);
338 ResolveStorage(st, 0);
340 p->p_Format = PFMT_RESOLVED;
346 * ResolveClasses() - Resolve superclasses and do class merge
348 * This code implements the most complex feature of the language: subclassing
351 * The hardest thing we have to do is 'dup' declarations and code in order to
352 * implement subclassing and refinement. For example, a procedure defined in
353 * Integer must be dup'd for each subclass of Integer. We have to do this
354 * because storage requirements will change due to both subclassing and
355 * refinement. Even auto variables may wind up with different types between
356 * superclass and subclass.
358 * We must scan ST_Import and ST_Class statements.
362 ResolveClasses(Stmt *st, int flags)
364 SemGroup *sg = st->st_MyGroup;
367 * Resolver interlock. Assert that we aren't looping. A loop can occur
368 * if class A embeds class B and class B embeds class A (versus a pointer
371 dassert_stmt(st, (st->st_Flags & STF_RESOLVING) == 0);
372 if (st->st_Flags & STF_RESOLVED)
374 st->st_Flags |= STF_RESOLVING;
377 * If this is a subclass, integrate the superclass into it
379 if (st->st_Op == ST_Class && st->st_ClassStmt.es_Super) {
380 Type *super = st->st_ClassStmt.es_Super;
388 * Locate the superclass. 'super' does not appear in any other
389 * list.. this is a unique Type structure.
391 dassert_stmt(st, super->ty_Op == TY_UNRESOLVED);
/* Retry until the superclass reference resolves to a real type. */
393 resolveSuperClass(super);
394 } while (super->ty_Op == TY_UNRESOLVED);
396 dassert_stmt(st, super->ty_Op == TY_CLASS);
399 * Cleanup (XXX free qualified segments??)
401 st->st_ClassStmt.es_Super = super;
402 st->st_ClassStmt.es_Decl->d_ClassDecl.ed_Super = super;
405 * Inherit internal unsigned integer and floating point flags and a
408 sg->sg_Flags |= super->ty_ClassType.et_SemGroup->sg_Flags &
409 (SGF_ISINTEGER | SGF_ISUNSIGNED |
410 SGF_ISFLOATING | SGF_ISBOOL |
411 SGF_HASASS | SGF_GHASASS |
412 SGF_HASLVREF | SGF_GHASLVPTR |
416 * The subclass's unrestricted scope (or not), must match the
417 * super-class. Otherwise methods pulled-down from the superclass
418 * might not be compatible with the subclass.
420 if ((sg->sg_Stmt->st_ClassStmt.es_Scope.s_Flags ^
421 super->ty_ClassType.et_SemGroup->sg_Stmt->
422 st_ClassStmt.es_Scope.s_Flags) & SCOPE_UNRESTRICTED)
424 StmtFatalError(st, TOK_ERR_CLASS_UNRESTRICTED_NOMATCH);
428 * Locate the class statement associated with the superclass and
431 sst = super->ty_ClassType.et_SemGroup->sg_Stmt;
432 dassert(sst->st_MyGroup == super->ty_ClassType.et_SemGroup);
433 dassert_stmt(st, sst != NULL);
434 dassert_stmt(st, sst->st_Op == ST_Class);
/* Superclass must be fully merged before we can merge into it. */
436 ResolveClasses(sst, flags);
439 * Assign the sg_Level for the subclass. This is used for semantic
440 * searches when a subclass is passed to a procedure expecting the
443 sg->sg_Level = sst->st_MyGroup->sg_Level + 1;
446 * XXX Subclasses can inherit locking scope here. Currently we do
450 if (sst->u.ClassStmt.es_Decl->d_ScopeFlags & SCOPE_HARD) {
451 st->u.ClassStmt.es_Decl->d_ScopeFlags |= SCOPE_HARD;
452 } else if (st->u.ClassStmt.es_Decl->d_ScopeFlags & SCOPE_HARD) {
453 StmtFatalError(st, TOK_ERR_ILLEGAL_LOCKING_REFINEMENT);
458 * First move all the declarations from sg to tsg so we can merge the
459 * superclass properly (keep all the d_Index's correct). Note that
460 * tsg is not 100% integrated so we can only use it for search
461 * purposes. We absolutely CANNOT DupDeclaration() into tsg!
463 tsg = AllocSemGroup(SG_CLASS, sg->sg_Parse, NULL, sg->sg_Stmt);
465 while ((d = RUNE_FIRST(&sg->sg_DeclList)) != NULL) {
466 if (d->d_Id == RUNEID__T)
472 * If our sub-class does not have a _t type, then automatically
475 * Add to sg then rename so the declaration is properly initialized
476 * for sg (e.g. fields like d_Level).
479 Scope scope = INIT_SCOPE(SCOPE_REFINE);
481 d = AllocDeclaration(sg, DOP_TYPEDEF, &scope);
482 d->d_TypedefDecl.ed_Type =
483 AllocClassType(&sg->sg_ClassList, super,
484 sg->sg_Stmt->st_MyGroup, SCOPE_ALL_VISIBLE);
485 HashDecl(d, RUNEID__T);
490 * Reset count (index counter)
492 sg->sg_DeclCount = 0;
495 * Merge the superclass into this class, in sequence. Iterate through
496 * declarations in the superclass and pull them into the subclass.
497 * Figure out compatibility between super and subclasses.
499 * d - iterates the superclass nd - subclass declaration refining
500 * the superclass decl
502 RUNE_FOREACH(d, &sst->st_MyGroup->sg_DeclList, d_Node) {
506 dassert(d->d_Level != NULL &&
507 d->d_Level->sg_Level < sg->sg_Level);
510 * See if the superclass decl conflicts with a subclass decl. If
511 * there is no conflict pull it into the subclass and adjust the
512 * visibility. Note that the superclass may have duplicate ids,
513 * but they will be at different levels if so.
515 * The super linkage is required when findDecl() checks
516 * visibility of a declaration hidden relative to our subclass,
517 * but not necessarily hidden relative to the superclass.
521 rd = FindDeclRefineId(tsg, d->d_Id, &eno);
523 /* XXX proliferates decls/types? */
524 nd = DupDeclaration(sg, d);
525 dassert(d->d_Index == nd->d_Index);
/*
 * NOTE(review): '~' binds tighter than '|', so this mask is
 * (~SCOPE_ALL_VISIBLE) | super->ty_Visibility -- i.e. it clears only
 * the visibility bits NOT granted by the superclass.  The later cases
 * use ~(SCOPE_ALL_VISIBLE | SCOPE_REFINE); confirm this asymmetry is
 * intentional.
 */
526 nd->d_ScopeFlags &= ~SCOPE_ALL_VISIBLE | super->ty_Visibility;
527 nd->d_ScopeFlags &= ~SCOPE_REFINE;
530 * Superclass decl is brought in unrefined (though it might
531 * be an implied refinement depending on side-effects).
533 nd->d_SubNext = d->d_SubBase;
540 * If there is a conflict and we are not refining the superclass
541 * entity, then pull in the superclass entity and make it
542 * invisible to sg_Level searches. This could bring in multiple
543 * levels of the same id.
545 * Note that this may result in multiple ids, but they will be at
546 * different levels. In this case rd will be at the current
547 * level and nd will be at some prior level.
549 * Order is important here.
551 if ((rd->d_ScopeFlags & SCOPE_REFINE) == 0) {
552 /* XXX proliferates decls/types? */
553 nd = DupDeclaration(sg, d);
554 dassert(d->d_Index == nd->d_Index);
555 nd->d_ScopeFlags &= ~(SCOPE_ALL_VISIBLE | SCOPE_REFINE);
557 printf(" conflict, not refined, overload\n");
561 * Superclass decl is brought in unrefined (though it might
562 * be an implied refinement depending on side-effects).
564 nd->d_SubNext = d->d_SubBase;
571 * Ok, we need to refine. But the superclass may contain
572 * multiple levels of the same id. We only refine the one that
573 * is visible to us. None of these other declarations will be at
576 if ((d->d_ScopeFlags & SCOPE_ALL_VISIBLE) == 0) {
577 nd = DupDeclaration(sg, d);
578 dassert(d->d_Index == nd->d_Index);
579 nd->d_ScopeFlags &= ~(SCOPE_ALL_VISIBLE |
583 " conflict, refined (skip this one): %s\n",
588 * Superclass decl is brought in unrefined (though it might
589 * be an implied refinement depending on side-effects).
591 nd->d_SubNext = d->d_SubBase;
598 * Whew! Finally, we found the superclass decl that we wish to
599 * refine. We had better not have already refined it or there's
600 * something wrong with the algorithm.
602 * Since we inherit the superclass method's level our method will
603 * run in the superclass instead of the original, but d_Super
604 * still must be set for findDecl() to track down visibility
605 * relative to the superclass methods.
608 dassert_decl(rd, rd->d_Super == NULL);
609 dassert(d->d_Index == rd->d_Index);
610 rd->d_Level = d->d_Level;
614 * super->subclass(es) list
616 rd->d_SubNext = d->d_SubBase;
620 * This is for the superclass method access special case below.
622 if (d->d_Op == DOP_PROC) {
623 d->d_Flags |= DF_SUPERCOPY;
627 * Refinements inherit the locking mode from the superclass and
628 * are not allowed to change it.
630 if ((rd->d_ScopeFlags & SCOPE_LOCKING_MASK) &&
631 (d->d_ScopeFlags ^ rd->d_ScopeFlags) &
632 SCOPE_LOCKING_MASK) {
633 StmtFatalError(st, TOK_ERR_ILLEGAL_LOCKING_REFINEMENT);
635 rd->d_ScopeFlags |= d->d_ScopeFlags &
639 * Inherit scope from the superclass if it is not specified in
640 * the REFINE declaration (see AllocDeclaration).
642 if ((rd->d_ScopeFlags & SCOPE_ALL_VISIBLE) == 0) {
643 rd->d_ScopeFlags |= d->d_ScopeFlags & SCOPE_ALL_VISIBLE;
648 * Deal with any remaining elements in tsg. These are 'extensions'
649 * to the superclass. There may also be invisible DOP_PROC's to
650 * handle the special superclass method call case described above.
652 while ((rd = RUNE_FIRST(&tsg->sg_DeclList)) != NULL) {
653 if (rd->d_ScopeFlags & SCOPE_REFINE) {
654 if (rd->d_Super == NULL) {
655 char buf[RUNE_IDTOSTR_LEN];
656 fprintf(stderr, "Unable to refine %s, it does not exist "
658 runeid_text(rd->d_Id, buf));
668 * We have to special case super.method() for a refined method.
669 * Normally this makes the original method inaccessible (for
670 * storage), but we want it to work for a procedure so we make a copy
671 * in tsg. (we can't dup it directly into sg because it will screw
674 * We must not only clear the scope visibility and the temporary
675 * refine flag, we also have to clear constructor/destructor scope in
676 * the copy so only the refined constructor/destructor is called, not
677 * both the refined and the superclass constructor/destructor.
679 RUNE_FOREACH(d, &sst->st_MyGroup->sg_DeclList, d_Node) {
682 if (d->d_Flags & DF_SUPERCOPY) {
683 d->d_Flags &= ~DF_SUPERCOPY;
684 nd = DupDeclaration(sg, d);
685 nd->d_ScopeFlags &= ~(SCOPE_ALL_VISIBLE |
/* Non-subclass ST_Class: ensure a _t typedef exists. */
691 } else if (st->st_Op == ST_Class) {
698 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
699 if (d->d_Id == RUNEID__T) {
706 * If our class does not have a _t type, then automatically
707 * add it in. This is not a sub-class so do not scope it
711 Scope scope = INIT_SCOPE(0);
713 d = AllocDeclaration(sg, DOP_TYPEDEF, &scope);
714 d->d_TypedefDecl.ed_Type =
715 AllocClassType(&sg->sg_ClassList, NULL,
716 sg->sg_Stmt->st_MyGroup, SCOPE_ALL_VISIBLE);
717 HashDecl(d, RUNEID__T);
/* Clear the interlock and mark this statement merged. */
721 st->st_Flags &= ~STF_RESOLVING;
722 st->st_Flags |= STF_RESOLVED;
725 * If this is an ST_Import we must recurse through it. The only
726 * statements under an Import should be Modules. Well, really just one
727 * module. And under that module we only care about ST_Import and
728 * ST_Class statements.
730 * If this is a shared import the statement list will be empty (later it
731 * may be used for import refinement, I dunno). This is what we want
732 * since we should only resolve a shared import once.
734 if (st->st_Op == ST_Import) {
737 RUNE_FOREACH(scan, &st->st_List, st_Node) {
740 dassert_stmt(scan, scan->st_Op == ST_Module);
741 RUNE_FOREACH(scan2, &scan->st_List, st_Node) {
742 if (scan2->st_Op == ST_Import ||
743 scan2->st_Op == ST_Class) {
744 ResolveClasses(scan2, flags);
/* Imports bound to a DLL may export a hook looked up via dlsym(). */
748 if (st->st_ImportStmt.es_DLL) {
749 void (*func) (void)= dlsym(st->st_ImportStmt.es_DLL,
758 * ResolveStmt() - Resolve all types, declarations, and semantic refs
760 * Resolves all types, declarations, and identifiers. Additionally this
761 * function resolves intermediate types for expressions. Storage sizes are
762 * resolved but offsets are not assigned to declarations.
764 * Returns a complexity count.
768 ResolveStmt(SemGroup *isg, Stmt *st, int flags)
771 * Process whether we detached as a thread already or not.
774 st->st_Flags |= st->st_Parent->st_Flags & STF_DIDRESULT;
777 * Deal with unresolved types here
779 if (st->st_Flags & STF_SEMANTIC) {
780 SemGroup *sg = st->st_MyGroup;
783 RUNE_FOREACH(type, &sg->sg_ClassList, ty_Node) {
784 if (type->ty_Op == TY_UNRESOLVED) {
785 resolveSuperClass(type);
791 * Resolve statements. Don't worry about declarations, those are handled
797 * This will just flag the import declaration as resolved so the code
798 * generator dives it for generation.
800 if (st->st_ImportStmt.es_Decl)
801 ResolveDecl(st->st_ImportStmt.es_Decl, 0);
805 * Recursively resolve contents
808 /* if (isg == NULL || (isg->sg_Flags & SGF_ENTRY)) */ {
811 RUNE_FOREACH(scan, &st->st_List, st_Node) {
813 * XXX pass isg for import, st_MyGroup for module??
815 ResolveStmt(st->st_MyGroup, scan, flags);
817 if (st->st_Op == ST_Import && st->st_ImportStmt.es_DLL) {
819 dlsym(st->st_ImportStmt.es_DLL, "resolveTypes");
828 ResolveDecl(st->st_ClassStmt.es_Decl, 0);
832 ResolveDecl(st->st_TypedefStmt.es_Decl, 0);
836 * Resolve declarations, skipping any whose context was moved to a
837 * class (e.g. a declaration at the top level of a file like
838 * Fd.setfd(...) also exists in the Fd class).
841 Declaration *d = st->st_DeclStmt.es_Decl;
844 for (i = 0; i < st->st_DeclStmt.es_DeclCount; ++i) {
845 if (st->st_MyGroup == d->d_MyGroup)
847 d = RUNE_NEXT(d, d_Node);
855 RUNE_FOREACH(scan, &st->st_List, st_Node) {
856 ResolveStmt(isg, scan, flags);
/* Loop statements: init, before-cond, after-cond, advance, body. */
863 if (st->st_LoopStmt.es_Init)
864 ResolveStmt(isg, st->st_LoopStmt.es_Init, flags);
865 if (st->st_LoopStmt.es_BCond) {
867 * NOTE: BoolType global implies an rvalue.
869 st->st_LoopStmt.es_BCond =
870 ResolveExp(isg, st->st_MyGroup,
871 st->st_LoopStmt.es_BCond,
872 &BoolType, RESOLVE_AUTOCAST);
874 if (st->st_LoopStmt.es_ACond) {
876 * NOTE: BoolType global implies an rvalue.
878 st->st_LoopStmt.es_ACond =
879 ResolveExp(isg, st->st_MyGroup,
880 st->st_LoopStmt.es_ACond,
881 &BoolType, RESOLVE_AUTOCAST);
883 if (st->st_LoopStmt.es_AExp) {
885 * NOTE: VoidType global implies an rvalue.
887 st->st_LoopStmt.es_AExp =
888 ResolveExp(isg, st->st_MyGroup,
889 st->st_LoopStmt.es_AExp,
890 &VoidType, RESOLVE_AUTOCAST);
892 if (st->st_LoopStmt.es_Body) {
893 ResolveStmt(isg, st->st_LoopStmt.es_Body, flags);
895 /* remove handled in ResolveDecl DOP_PROC */
896 if ((st->st_LoopStmt.es_Body->st_Flags &
897 STF_RESOLVED) == 0) {
898 ResolveAlignment(st->st_LoopStmt.es_Body,
900 ResolveStorage(st->st_LoopStmt.es_Body, flags);
911 * NOTE: BoolType global implies an rvalue.
913 st->st_IfStmt.es_Exp = ResolveExp(isg, st->st_MyGroup,
914 st->st_IfStmt.es_Exp,
915 &BoolType, RESOLVE_AUTOCAST);
916 ResolveStmt(isg, st->st_IfStmt.es_TrueStmt, flags);
917 if (st->st_IfStmt.es_FalseStmt)
918 ResolveStmt(isg, st->st_IfStmt.es_FalseStmt, flags);
922 * NOTE: lvalue/rvalue depends on return type.
924 st->st_RetStmt.es_ProcRetType =
925 resolveReturnType(st->st_MyGroup, flags);
926 if (st->st_RetStmt.es_Exp) {
/* return after result; is a sequencing error. */
927 if (st->st_Flags & STF_DIDRESULT)
928 StmtFatalError(st, TOK_ERR_RESULT_SEQUENCING);
929 st->st_RetStmt.es_Exp =
930 ResolveExp(isg, st->st_MyGroup,
931 st->st_RetStmt.es_Exp,
932 st->st_RetStmt.es_ProcRetType,
938 * NOTE: lvalue/rvalue depends on return type.
/* result; only valid once, and only at procedure top level. */
940 if (st->st_Flags & STF_DIDRESULT)
941 StmtFatalError(st, TOK_ERR_RESULT_SEQUENCING);
942 if ((st->st_Parent->st_Flags & STF_SEMTOP) == 0)
943 StmtFatalError(st, TOK_ERR_RESULT_SEQUENCING);
944 st->st_ResStmt.es_ProcRetType =
945 resolveReturnType(st->st_MyGroup, flags);
946 if (st->st_ResStmt.es_Exp) {
947 st->st_ResStmt.es_Exp =
948 ResolveExp(isg, st->st_MyGroup,
949 st->st_ResStmt.es_Exp,
950 st->st_ResStmt.es_ProcRetType,
955 * Flag that we executed result;
958 for (scan = st; scan; scan = scan->st_Parent) {
959 scan->st_Flags |= STF_DIDRESULT;
960 scan->st_MyGroup->sg_Flags |= SGF_DIDRESULT;
961 if (scan->st_Flags & STF_SEMTOP)
968 * NOTE: Switch type must be an rvalue.
970 * NOTE: It is possible to switch on a type. See ST_Case below for
/* EXF_REQ_TYPE allows the switch expression to resolve to a type. */
973 st->st_SwStmt.es_Exp->ex_Flags |= EXF_REQ_TYPE;
974 st->st_SwStmt.es_Exp = ResolveExp(isg, st->st_MyGroup,
975 st->st_SwStmt.es_Exp,
979 * Switch-on-expression() expects an rvalue.
981 if ((st->st_SwStmt.es_Exp->ex_Flags & EXF_RET_TYPE) == 0) {
982 st->st_SwStmt.es_Exp->ex_Type =
983 DEL_LVALUE(st->st_SwStmt.es_Exp->ex_Type);
987 RUNE_FOREACH(scan, &st->st_List, st_Node) {
988 ResolveStmt(isg, scan, flags);
994 * Handle a case/default. Note that when switching on a type, each
995 * case expression must return a type.
997 * NOTE: Case type must be an rvalue. We use the switch type to
998 * cast, so it will be.
1006 * Set type to cast cases to if we are switching on an
1007 * expression, otherwise we are switching on a type and should
1008 * not try to coerce the cases (it doesn't make sense to).
1010 dassert_stmt(st, st->st_Parent->st_Op == ST_Switch);
1011 if (st->st_Parent->st_SwStmt.es_Exp->ex_Flags & EXF_RET_TYPE)
1014 type = st->st_Parent->st_SwStmt.es_Exp->ex_Type;
1017 * case: (if es_Exp is NULL, this is a default: )
1019 if ((exp = st->st_CaseStmt.es_Exp) != NULL) {
1021 exp->ex_Flags |= EXF_REQ_TYPE;
1022 exp = ResolveExp(isg, st->st_MyGroup,
1023 exp, type, RESOLVE_AUTOCAST);
1025 dassert(exp->ex_Flags & EXF_RET_TYPE);
1026 st->st_CaseStmt.es_Exp = exp;
1030 * Elements of the case/default
1032 RUNE_FOREACH(scan, &st->st_List, st_Node) {
1033 ResolveStmt(isg, scan, flags);
1039 * NOTE: VoidType global implies an rvalue.
1041 * NOTE: If ResolveExp() doesn't cast to void for us, we will do it
1047 exp = ResolveExp(isg, st->st_MyGroup,
1048 st->st_ExpStmt.es_Exp,
1049 &VoidType, RESOLVE_AUTOCAST);
1050 if (exp->ex_Type != &VoidType) {
1051 exp = resolveExpCast(isg, st->st_MyGroup,
1052 exp, &VoidType, flags);
1054 st->st_ExpStmt.es_Exp = exp;
1061 RUNE_FOREACH(scan, &st->st_List, st_Node) {
1062 ResolveStmt(isg, scan, flags);
1066 case ST_ThreadSched:
/* Unknown statement op: hard assertion failure. */
1069 dassert_stmt(st, 0);
1073 * Calculate and propagate complexity upward.
1078 if ((sg = st->st_MyGroup) != NULL) {
1079 ++sg->sg_Complexity;
1080 if ((st->st_Flags & STF_SEMTOP) == 0 &&
1082 RUNE_NEXT(st, st_Node) == NULL) {
1083 sg->sg_Parent->sg_Complexity +=
1088 * Head of procedure needs to know if any ABI calls will be made
1089 * so it can reserve stack space.
1091 if ((st->st_Flags & STF_SEMTOP) == 0 &&
1093 sg->sg_Parent->sg_Flags |=
1094 sg->sg_Flags & SGF_ABICALL;
1101 * Locate the ST_Proc statement and resolve & return its return type
/*
 * Walks up the SemGroup chain to the enclosing procedure; asserts the
 * procedure's declaration is at least mid-resolve (DF_RESOLVING) so the
 * proc type is populated.
 */
1105 resolveReturnType(SemGroup *sg, int flags __unused)
1112 * Locate the ST_Proc statement
1114 while (sg && (sg->sg_Stmt == NULL || sg->sg_Stmt->st_Op != ST_Proc))
1116 dassert(sg != NULL);
1118 d = st->st_ProcStmt.es_Decl; /* decl is already resolved */
1119 dassert_decl(d, d->d_Op == DOP_PROC);
1120 dassert_decl(d, d->d_Flags & (DF_RESOLVING | DF_RESOLVED));
1121 type = d->d_ProcDecl.ed_Type;
1122 dassert_decl(d, type->ty_Op == TY_PROC);
1123 return (type->ty_ProcType.et_RetType);
/*
 * resolveArgsType() - like resolveReturnType() but returns the enclosing
 * procedure's arguments type (ty_ProcType.et_ArgsType).
 */
1127 resolveArgsType(SemGroup *sg, int flags __unused)
1134 * Locate the ST_Proc statement
1136 while (sg && (sg->sg_Stmt == NULL || sg->sg_Stmt->st_Op != ST_Proc))
1138 dassert(sg != NULL);
1140 d = st->st_ProcStmt.es_Decl; /* decl is already resolved */
1141 dassert_decl(d, d->d_Op == DOP_PROC);
1142 dassert_decl(d, d->d_Flags & (DF_RESOLVING | DF_RESOLVED));
1143 type = d->d_ProcDecl.ed_Type;
1144 dassert_decl(d, type->ty_Op == TY_PROC);
1145 return (type->ty_ProcType.et_ArgsType);
1149 * ResolveDecl() - resolve a declaration
1151 * If the declaration represents a procedure argument, special processing of
1152 * LVALUE scope is required to pass the declaration by reference instead of
1153 * by value. Note that the size of the underlying type DOES NOT CHANGE... it
1154 * may be much larger.
1156 * NOTE: We do not resolve d_Offset here.
1160 ResolveDecl(Declaration *d, int retry)
1164 SemGroup *sg = NULL;
1168 * Recursion detection
1170 if (d->d_Flags & DF_RESOLVED)
1172 if (d->d_Flags & DF_RESOLVING) {
1176 d->d_Flags |= DF_RESOLVING;
1179 * Resolve according to the kind of declaration
1183 if (d->d_ClassDecl.ed_Super)
1184 ResolveType(d->d_ClassDecl.ed_Super, NULL, 0);
1185 sg = d->d_ClassDecl.ed_SemGroup;
1186 ResolveSemGroup(sg, 0);
1187 if (sg->sg_Flags & SGF_RESOLVED) {
1188 d->d_Bytes = d->d_ClassDecl.ed_SemGroup->sg_Bytes;
1189 d->d_AlignMask = d->d_ClassDecl.ed_SemGroup->sg_AlignMask;
1195 * Alias access is a barrier and always returns an rvalue.
1197 * DupExp is absolutely required due to the alias's target context
1198 * being different for each consumer.
1200 type = ResolveType(d->d_AliasDecl.ed_Type, NULL, 0);
1201 if (type->ty_Flags & TF_RESOLVED)
1203 if (d->d_AliasDecl.ed_OrigAssExp) {
1204 d->d_AliasDecl.ed_AssExp =
1205 DupExp(d->d_MyGroup, d->d_AliasDecl.ed_OrigAssExp);
1206 d->d_AliasDecl.ed_AssExp =
1207 ResolveExp(d->d_ImportSemGroup, d->d_MyGroup,
1208 d->d_AliasDecl.ed_AssExp,
1212 /* handled in DOT and STRIND resolver */
1213 if ((d->d_Flags & DF_DIDEXPDUP) == 0) {
1214 d->d_Flags |= DF_DIDEXPDUP;
1215 SetDupExp(NULL, d->d_AliasDecl.ed_AssExp);
1219 d->d_Flags |= DF_RESOLVED; /* XXX */
1220 type = ResolveType(d->d_TypedefDecl.ed_Type, NULL, 0);
1221 d->d_Flags &= ~DF_RESOLVED;
1222 if (type->ty_Flags & DF_RESOLVED)
1227 * This only occurs when resolving an import's semantic group. Since
1228 * we are scanning statements in that context we do not have to
1229 * recurse here, ResolveStmt() will do it for us.
1235 * XXX global procedure, later on, make the argument a type instead
1238 * Avoid a circular loop failure when the procedure declaration
1239 * references the class it is defined in by marking the resolve
1240 * complete even if the type isn't. We can do this because the
1241 * procedure takes no field storage.
1243 ResolveMethodProcedureThisArg(d->d_MyGroup, d);
1244 ResolveType(d->d_ProcDecl.ed_Type, NULL, 0);
1248 * Deal with constructor/destructor chaining. The chaining winds up
1249 * being reversed and will be corrected by the caller.
1251 * NOTE: Constructors and destructors might be referenced without the
1252 * entire SG being resolved, so be sure to set the ABI flags here.
1254 if (d->d_ScopeFlags & SCOPE_GLOBAL) {
1255 if ((d->d_Flags & DF_ONGLIST) == 0 &&
1256 (d->d_ScopeFlags & (SCOPE_CONSTRUCTOR |
1257 SCOPE_DESTRUCTOR))) {
1258 d->d_GNext = d->d_MyGroup->sg_GBase;
1259 d->d_Flags |= DF_ONGLIST;
1260 d->d_MyGroup->sg_GBase = d;
1261 d->d_MyGroup->sg_Flags |= SGF_GABICALL;
1264 if ((d->d_Flags & DF_ONCLIST) == 0 &&
1265 (d->d_ScopeFlags & SCOPE_CONSTRUCTOR)) {
1266 d->d_CNext = d->d_MyGroup->sg_CBase;
1267 d->d_Flags |= DF_ONCLIST;
1268 d->d_MyGroup->sg_CBase = d;
1269 d->d_MyGroup->sg_Flags |= SGF_ABICALL;
1271 if ((d->d_Flags & DF_ONDLIST) == 0 &&
1272 (d->d_ScopeFlags & SCOPE_DESTRUCTOR)) {
1273 d->d_DNext = d->d_MyGroup->sg_DBase;
1274 d->d_Flags |= DF_ONDLIST;
1275 d->d_MyGroup->sg_DBase = d;
1276 d->d_MyGroup->sg_Flags |= SGF_ABICALL;
1281 * If this procedure is bound to a DLL we have to resolve it here.
1283 if (d->d_ScopeFlags & SCOPE_CLANG) {
1284 char buf[RUNE_IDTOSTR_LEN];
1286 d->d_ProcDecl.ed_DLLFunc = FindDLLSymbol(NULL, d->d_ImportSemGroup,
1287 runeid_text(d->d_Id, buf));
1290 case DOP_ARGS_STORAGE:
1291 case DOP_STACK_STORAGE:
1292 case DOP_GLOBAL_STORAGE:
1293 case DOP_GROUP_STORAGE:
1294 type = ResolveType(d->d_StorDecl.ed_Type, NULL, 0);
1297 * Complete if the underlying type is resolved.
1299 if (type->ty_Flags & TF_RESOLVED)
1303 * Promote the lvalue storage qualifier (e.g. from a typedef) into
1304 * the declaration's scope. This is what ultimately controls lvalue
1305 * vs rvalue arguments to procedures and such.
1307 if ((type->ty_SQFlags & SF_LVALUE) &&
1308 (d->d_ScopeFlags & SCOPE_LVALUE) == 0)
1310 d->d_ScopeFlags |= SCOPE_LVALUE;
1314 * If the resolve adjusted locking modes the declaration scope needs
1315 * to be adjusted. The declaration's d_Storage mechanics drive the
1318 if (type->ty_SQFlags & SF_UNTRACKED) {
1319 d->d_ScopeFlags &= ~SCOPE_LOCKING_MASK;
1320 d->d_ScopeFlags |= SCOPE_UNTRACKED;
1322 if (type->ty_SQFlags & SF_UNLOCKED) {
1323 d->d_ScopeFlags &= ~SCOPE_LOCKING_MASK;
1324 d->d_ScopeFlags |= SCOPE_UNLOCKED;
1326 if (type->ty_SQFlags & SF_SOFT) {
1327 d->d_ScopeFlags &= ~SCOPE_LOCKING_MASK;
1328 d->d_ScopeFlags |= SCOPE_SOFT;
1330 if (type->ty_SQFlags & SF_HARD) {
1331 d->d_ScopeFlags &= ~SCOPE_LOCKING_MASK;
1332 d->d_ScopeFlags |= SCOPE_HARD;
1336 * Default assignment handling expects an rvalue.
1338 if (d->d_StorDecl.ed_OrigAssExp) {
1339 d->d_StorDecl.ed_AssExp =
1340 DupExp(d->d_MyGroup, d->d_StorDecl.ed_OrigAssExp);
1341 d->d_StorDecl.ed_AssExp =
1342 ResolveExp(d->d_ImportSemGroup, d->d_MyGroup,
1343 d->d_StorDecl.ed_AssExp,
1347 if (d->d_ScopeFlags & SCOPE_LVALUE) {
1349 * Object is passed as a LValueStor structure. Note that d_Bytes
1350 * is going to be different then the underlying type (which
1351 * represents the actual object).
1353 d->d_Bytes = sizeof(LValueStor);
1354 d->d_AlignMask = LVALUESTOR_ALIGN;
1357 * Object is passed by value.
1359 d->d_AlignMask = type->ty_AlignMask;
1360 d->d_Bytes = type->ty_Bytes;
1364 * If the declaration represents or contains an argument-lvalue or a
1365 * pointer we have to add it to the SemGroup's SRBase list to
1366 * properly reference or dereference the elements. XXX only do this
1367 * for non-global storage.
1369 * If the declaration has LVALUE scope we must do the same because
1370 * the ref is tracked.
1372 if ((d->d_Flags & DF_ONSRLIST) == 0) {
1373 if (d->d_Op != DOP_GLOBAL_STORAGE &&
1374 (type->ty_Flags & TF_HASLVREF)) {
1375 d->d_SRNext = d->d_MyGroup->sg_SRBase;
1376 d->d_MyGroup->sg_SRBase = d;
1377 d->d_Flags |= DF_ONSRLIST;
1378 } else if (d->d_ScopeFlags & SCOPE_LVALUE) {
1379 d->d_SRNext = d->d_MyGroup->sg_SRBase;
1380 d->d_MyGroup->sg_SRBase = d;
1381 d->d_Flags |= DF_ONSRLIST;
1386 * Deal with constructor/destructor chaining. The chaining winds up
1387 * being reversed and will be corrected by the caller.
1389 * NOTE: Constructors and destructors might be referenced without the
1390 * entire SG being resolved, so be sure to set the ABI flags here.
1392 if ((d->d_Flags & DF_ONCLIST) == 0 &&
1393 (type->ty_Flags & TF_HASCONSTRUCT)) {
1394 d->d_CNext = d->d_MyGroup->sg_CBase;
1395 d->d_MyGroup->sg_CBase = d;
1396 d->d_MyGroup->sg_Flags |= SGF_ABICALL;
1397 d->d_Flags |= DF_ONCLIST;
1399 if ((d->d_Flags & DF_ONDLIST) == 0 &&
1400 (type->ty_Flags & TF_HASDESTRUCT)) {
1401 d->d_DNext = d->d_MyGroup->sg_DBase;
1402 d->d_MyGroup->sg_DBase = d;
1403 d->d_MyGroup->sg_Flags |= SGF_ABICALL;
1404 d->d_Flags |= DF_ONDLIST;
1406 if ((d->d_Flags & DF_ONGLIST) == 0 &&
1407 (type->ty_Flags & (TF_HASGCONSTRUCT | TF_HASGDESTRUCT))) {
1408 d->d_GNext = d->d_MyGroup->sg_GBase;
1409 d->d_MyGroup->sg_GBase = d;
1410 d->d_MyGroup->sg_Flags |= SGF_GABICALL;
1411 d->d_Flags |= DF_ONGLIST;
1416 * XXX This whole thing has changed. We don't adjust default SCOPE
1417 * or SF locking flags any more, we let the code generator and
1418 * interpreter detect that a default mode is being used.
1420 * We set content-locking defaults generically. With no SCOPE_*
1421 * flags set the default will be normally-locked (GENSTAT_LOCK).
1423 * SCOPE_UNTRACKED- GENSTAT_NONE (no ref, no lock). SCOPE_UNLOCKED -
1424 * GENSTAT_REFD SCOPE_SOFT - GENSTAT_LOCK SCOPE_HARD -
1427 * Content-locking is only applicable to an lvalue, pointer, or
1428 * reference object, but we still want to set the proper defaults
1431 * The contents of classes and arrays are never content-locked.
1432 * Compound types (that are not procedure arguments) are also not
1433 * content-locked for now.
1435 if ((d->d_Op & DOPF_STORAGE) &&
1436 (d->d_Scope.s_Flags & SCOPE_LVALUE) == 0) {
1437 if (type->ty_Op == TY_CLASS ||
1438 type->ty_Op == TY_ARYOF ||
1439 type->ty_Op == TY_COMPOUND) {
1440 d->d_ScopeFlags |= SCOPE_UNLOCKED;
1450 d->d_Flags &= ~DF_RESOLVING;
1451 d->d_Flags |= DF_RESOLVED;
1457 * Post resolution flag resolving (to handle recursion)
1462 * Create copies of procedures as they are needed (thus avoiding an
1463 * XxY matrix effect).
1465 if ((st = d->d_ProcDecl.ed_OrigBody) == NULL) {
1466 Declaration *super = d->d_Super;
1467 while (super && super->d_ProcDecl.ed_OrigBody == NULL) {
1468 super = super->d_Super;
1471 st = super->d_ProcDecl.ed_OrigBody;
1472 d->d_ProcDecl.ed_OrigBody = st;
1475 if (st && (d->d_Flags & DF_DIDPULLDOWN) == 0) {
1477 * Procedure is being used in the primary class it was defined
1478 * in or pulled into from a super-class.
1480 * Link the procedure body to the declaration and resolve the
1481 * procedure body in the context of the correct class.
1483 d->d_Flags |= DF_DIDPULLDOWN;
1484 st = DupStmt(d->d_MyGroup, st->st_Parent, st);
1485 dassert_stmt(st, d->d_ProcDecl.ed_ProcBody == NULL);
1487 d->d_ProcDecl.ed_ProcBody = st;
1488 st->st_ProcStmt.es_Decl = d;
1489 st->st_ProcStmt.es_Scope = d->d_Scope;
1491 ResolveMethodProcedureThisArg(d->d_MyGroup, d);
1492 ResolveStmt(d->d_ImportSemGroup, st, 0);
1494 ResolveAlignment(st);
1504 * __align(%d) scope qualifier, override the type's alignment
1506 if ((d->d_Scope.s_Flags & SCOPE_ALIGN) && d->d_Scope.s_AlignOverride)
1507 d->d_AlignMask = d->d_Scope.s_AlignOverride - 1;
1511 if (sg && (sg->sg_Type == SG_MODULE || sg->sg_Type == SG_CLASS)) {
1512 /* SG_COMPOUND too? maybe not */
1513 ResolveSemGroup(d->d_MyGroup, 0);
1519 * We specifically do not try to fully resolve the decl's SG, which
1520 * allows us to avoid procedures and storage which are never used.
1521 * However, the presence of constructors or destructors requires a scan.
1523 if ((d->d_MyGroup->sg_Flags & (SGF_RESOLVING | SGF_RESOLVED)) == 0) {
1526 RUNE_FOREACH(d2, &d->d_MyGroup->sg_DeclList, d_Node) {
1527 if ((d2->d_ScopeFlags &
1528 (SCOPE_CONSTRUCTOR | SCOPE_DESTRUCTOR)) &&
1529 (d2->d_Flags & DF_RESOLVED) == 0) {
1538 * ResolveExp() - resolve expression
1540 * Resolve an expression. We are expected to resolve all ex_Type's for the
1541 * expression tree as well as expected to track down operators and base
1544 * itype is a type hint. If non-NULL, the caller would like our expression
1545 * to return the specified type. There are a few special cases:
1547 * EXF_REQ_ARRAY - when OBRACKET requests an array optimization it passes a
1548 * post-array-indexed typehint (as if you had done the optimization). You
1549 * must ignore itype if you are unable to do the optimization.
1551 * NOTE: Even rvalues may have refstor side-effects at run-time.
1554 #define exFlags exp->ex_Flags
1555 #define exFlags2 exp->ex_Flags2
1556 #define exType exp->ex_Type
1557 #define exToken exp->ex_Token
1558 #define exDecl exp->ex_Decl
1559 #define exLhs exp->ex_Lhs
1560 #define exVisibility exp->ex_Visibility
1561 #define exRhs exp->ex_Rhs
1562 #define exId exp->ex_Id
1563 #define exStr exp->ex_Str
1567 ResolveExp(SemGroup *isg, SemGroup *sg, Exp *exp, Type *itype, int flags)
1571 if (exp->ex_Flags & EXF_DUPEXP)
1572 exp = DupExp(sg, exp);
1576 * Ensure that the cast target type hint is resolved.
1579 ResolveType(itype, NULL, 0);
1582 * note: certain cases below call other resolver functions and assume
1583 * that ex* variables are unchanged.
1585 dassert((exFlags & EXF_DUPEXP) || (exFlags & EXF_RESOLVED) == 0);
1590 * An assignment. Note that we optimize void returns (such as when
1591 * an assignment is a statement like 'a = 4;' ... the result of the
1592 * assignment is cast to void.
1594 * NOTE: Left-hand-side must be an LVALUE, return type inherits this
1595 * feature unless the parent turns off the bit so the TOK_ASS
1596 * run-time must deal with that.
1598 exLhs = ResolveExp(isg, sg, exLhs, NULL,
1599 flags & ~RESOLVE_AUTOCAST);
1600 dassert_exp(exLhs, exLhs->ex_Type->ty_SQFlags & SF_LVALUE);
1602 exRhs = ResolveExp(isg, sg, exRhs,
1603 DEL_LVALUE(exLhs->ex_Type),
1604 flags | RESOLVE_AUTOCAST);
1605 if (exLhs->ex_Type->ty_SQFlags & SF_CONST) {
1606 ExpFatalError(exp, TOK_ERR_READONLY);
1609 /* AssExp handles this optimization */
1610 if (itype == &VoidType) {
1612 exFlags |= EXF_RET_VOID;
1614 exType = exLhs->ex_Type;
1619 * Check @ref assignment compatibility.
1621 if (exLhs->ex_Type->ty_Op == TY_REFTO) {
1622 switch (MatchType(exLhs->ex_Type, exRhs->ex_Type)) {
1623 case SG_COMPAT_FULL:
1624 printf("assign %s compatibility FULL\n",
1627 case SG_COMPAT_PART:
1628 printf("assign %s compatibility PART\n",
1631 case SG_COMPAT_SUBCLASS:
1632 printf("assign %s compatibility SUBCL\n",
1635 case SG_COMPAT_FAIL:
1636 printf("assign %s compatibility FAIL\n",
1645 * NOTE: BoolType global implies an rvalue.
1648 exLhs = ResolveExp(isg, sg, exLhs, &BoolType,
1649 flags | RESOLVE_AUTOCAST);
1653 * If left-side can terminate the operation, mark the expression as
1654 * PROBCONST for the interpreter and code generator (allowing the rhs
1655 * to not be a constant).
1657 if (exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) {
1660 exLhs = resolveConstExpBool(isg, sg, exLhs, flags, &ts);
1661 if (ts.ts_Bool == 0)
1662 exFlags |= EXF_PROBCONST;
1667 * Resolve rhs, and we can also flag PROBCONST if both sides are
1670 exRhs = ResolveExp(isg, sg, exRhs, &BoolType,
1671 flags | RESOLVE_AUTOCAST);
1672 if ((exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) &&
1673 (exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST))) {
1674 exFlags |= EXF_PROBCONST;
1680 * NOTE: BoolType global implies an rvalue.
1683 exLhs = ResolveExp(isg, sg, exLhs, &BoolType,
1684 flags | RESOLVE_AUTOCAST);
1688 * If left-side can terminate the operation, mark the expression as
1689 * PROBCONST for the interpreter and code generator (allowing the rhs
1690 * to not be a constant).
1692 if (exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) {
1695 exLhs = resolveConstExpBool(isg, sg, exLhs, flags, &ts);
1697 exFlags |= EXF_PROBCONST;
1702 * Resolve rhs, and we can also flag PROBCONST if both sides are
1705 exRhs = ResolveExp(isg, sg, exRhs, &BoolType,
1706 flags | RESOLVE_AUTOCAST);
1707 if ((exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) &&
1708 (exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST))) {
1709 exFlags |= EXF_PROBCONST;
1715 * This synthesized token occurs when we are able to collapse a
1716 * structural indirection or dotted element into a declaration. For
1717 * example, 'module.routine'.
1719 /* XXX couldconst? */
1724 * Structural field access. The left hand side may be an object
1725 * (class or compound), a class type, or a compound type.
1727 * A dotted access requires an lvalue on the left hand side if the
1728 * left hand side represents storage.
1730 * The result will be an lvalue if the right hand side represents
1731 * storage. We only loop if the right hand side is an alias
1743 int procedureOnly = 0;
1744 int eno = TOK_ERR_ID_NOT_FOUND;
1747 * NOTE: Hint must 'always happen' since we may be modifying an
1748 * expression that will later be Dup'd.
1750 * NOTE: Lhs is always an lvalue for TOK_DOT, but does not have
1751 * to be for TOK_STRIND.
1753 exLhs->ex_Flags |= EXF_REQ_TYPE;
1754 if (exToken == TOK_DOT)
1755 exLhs->ex_Flags |= exFlags & EXF_ADDRUSED;
1756 exLhs = ResolveExp(isg, sg, exLhs, NULL,
1757 flags & ~RESOLVE_AUTOCAST);
1760 * The RHS may have been turned into a TOK_SEMGRP_ID in a
1761 * previous duplicate. The change is considered permanent.
1763 if (exRhs->ex_Token != TOK_SEMGRP_ID) {
1764 dassert_exp(exRhs, exRhs->ex_Token == TOK_STRUCT_ID);
1765 exRhs = ResolveExp(isg, sg, exRhs, NULL,
1766 flags & ~RESOLVE_AUTOCAST);
1769 type = exLhs->ex_Type;
1772 * Calculate scope and SemGroup to search. Note that it is legal
1773 * to do a structural '.' selection on a pointer, but it works
1775 differently than indirecting through a pointer via '->'. In
1775 * the case of '.' on a pointer, we first search the system
1778 if (exLhs->ex_Flags & EXF_RET_TYPE) {
1783 * Figure out the base type used to look-up the identifier. An
1784 * identifier that resolves into a procedure winds up only being
1785 * a hint for a reference type.
1787 if (exToken == TOK_STRIND) {
1788 switch (type->ty_Op) {
1790 type = type->ty_RawPtrType.et_Type;
1793 type = type->ty_RefType.et_Type;
1797 dassert_exp(exp, 0);
1803 switch (type->ty_Op) {
1805 sg2 = type->ty_ClassType.et_SemGroup;
1808 sg2 = type->ty_CompType.et_SemGroup;
1811 sg2 = type->ty_ArgsType.et_SemGroup;
1814 sg2 = type->ty_VarType.et_SemGroup;
1817 sg2 = type->ty_ImportType.et_SemGroup;
1821 dassert_exp(exp, PointerType.ty_Op == TY_CLASS);
1822 sg2 = PointerType.ty_ClassType.et_SemGroup;
1826 dassert_exp(exp, ReferenceType.ty_Op == TY_CLASS);
1827 sg2 = ReferenceType.ty_ClassType.et_SemGroup;
1831 * Possibly a pointer, aka ptr.NULL
1835 visibility = exLhs->ex_Visibility;
1838 * Locate the identifier normally, via its type. ty_TypeVisbility
1839 * is the initial visibility (scope) that the semantic search
1840 * should use in locating the identifier.
1843 runeid_t ary[2] = { id, 0 };
1846 if (exLhs->ex_Token == TOK_ID ||
1847 exLhs->ex_Token == TOK_DECL) {
1848 if (exLhs->ex_Decl->d_Search) {
1849 level = exLhs->ex_Decl->d_Search->sg_Level;
1851 level = sg2->sg_Level;
1857 if (exLhs->ex_Flags & EXF_SUPER) {
1859 fprintf(stderr, "Can't super with reference type\n");
1860 dassert_exp(exp, 0);
1863 fprintf(stderr, "No superclass available\n");
1864 dassert_exp(exp, 0);
1869 level = sg2->sg_Level; /* may be -1 */
1871 visibility &= type->ty_Visibility;
1872 d = FindDeclPath(&exp->ex_LexRef, NULL,
1875 &visibility, level, &eno);
1877 * XXX more hack. If the super is visible and a procedure we
1878 * just found our own refinement, not the superclass method.
1879 * This is because there is no 'superclass method' per se,
1880 * refinements *REPLACE* superclass declarations and inherit
1881 * the superclass's level. However, we still want to be able
1882 * to chain method calls so what we do instead is go through
1883 * and find the procedure that we smacked when we did the
1884 * refinement. This procedure has already been conveniently
1885 * brought into the subclass context as an 'invisible' entity
1886 * at the same d_Level.
1888 if ((exLhs->ex_Flags & EXF_SUPER) && d &&
1889 d->d_Op == DOP_PROC &&
1890 (d->d_ScopeFlags & SCOPE_ALL_VISIBLE))
1892 runeid_t id2 = d->d_Id;
1893 SemGroup *olevel = d->d_Level;
1895 dassert_exp(exp, isRefTo == 0);
1897 while ((d = RUNE_NEXT(d, d_Node)) != NULL) {
1898 if (d->d_Id == id2 &&
1899 d->d_Level == olevel &&
1900 d->d_Op == DOP_PROC)
1910 if (d && procedureOnly && d->d_Op != DOP_PROC) {
1912 "PTR.ELEMENT may be used for special "
1913 "pointer method calls, but not to "
1914 "access storage elements. "
1915 "Use PTR->ELEMENT instead\n");
1916 dassert_exp(exp, 0);
1920 * If referencing actual storage the storage must be declared
1923 if (d && globalOnly && (d->d_Op & DOPF_STORAGE) &&
1924 (d->d_ScopeFlags & SCOPE_GLOBAL) == 0)
1926 char buf[RUNE_IDTOSTR_LEN];
1928 "%s is not global. Only globals can be accessed "
1930 runeid_text(d->d_Id, buf));
1931 dassert_exp(exp, 0);
1936 * Identifier found. Note that if we are going through a
1937 * reference type the declaration is not the actual one we
1938 * use at run time. It's just a template.
1942 exVisibility = visibility;
1944 if (exFlags & EXF_REQ_ADDROF)
1945 d->d_Flags |= DF_ADDROF;
1946 if (exFlags & EXF_ADDRUSED)
1947 d->d_Flags |= DF_ADDRUSED;
1950 * XXX this is in wrong place
1952 * ADDROF content-locked storage is not allowed, except for
1953 * the SCOPE_LVALUE case if the underlying type is
1956 * If we are running through a LValueStor, UNTRACKED and
1957 * UNLOCKED apply to it and not its contents. Check to see
1958 * if the contents are acceptable.
1960 if ((exFlags & EXF_REQ_ADDROF) &&
1961 (d->d_Op & DOPF_STORAGE) &&
1962 (d->d_Scope.s_Flags &
1963 (SCOPE_SOFT | SCOPE_HARD))) {
1964 type = d->d_StorDecl.ed_Type;
1965 if ((type->ty_Flags & TF_HASLVREF) &&
1966 type->ty_Op != TY_CLASS &&
1967 type->ty_Op != TY_ARYOF)
1969 ExpFatalError(exp, TOK_ERR_ILLEGAL_ADDRLOCKED);
1978 exType = d->d_ProcDecl.ed_Type;
1979 if (d->d_ProcDecl.ed_Type->ty_SQFlags & SF_METHOD) {
1981 * Method call, do not collapse the expression into a
1982 * direct declaration because the object is needed
1985 if (exLhs->ex_Flags & EXF_RET_TYPE)
1986 ExpPrintError(exLhs, TOK_ERR_METHOD_REQUIRES_OBJ);
1987 dassert((exLhs->ex_Flags & EXF_RET_TYPE) == 0);
1988 } else if (isRefTo) {
1990 * Call via reference. The lhs is required to
1991 * evaluate the actual method call at run-time.
1995 * Global method call or normal call. For the global
1996 * method case the lhs is not needed because the
1997 * parser entered the first argument as a type
2000 * Degenerate into a TOK_DECL. We depend on this
2001 * later. (mark ex_Type as parse-time for DupExp).
2003 exFlags &= ~EXF_BINARY;
2004 exFlags |= EXF_PARSE_TYPE;
2011 exType = DEL_LVALUE(d->d_AliasDecl.ed_Type);
2012 dassert_decl(d, d->d_AliasDecl.ed_AssExp != NULL);
2015 * NOTE: exLhs must be NULL if exp is unresolved. exp
2016 * tree duplications do not duplicate the alias's
2017 * exLHS even though UNARY is set.
2019 * DupExp is absolutely required due to the alias's
2020 * target context being different for each consumer.
2022 dassert_exp(exp, exRhs->ex_Lhs == NULL);
2023 exRhs->ex_Flags |= EXF_ALIAS | EXF_UNARY;
2024 exRhs->ex_Lhs = DupExp(sg2, d->d_AliasDecl.ed_AssExp);
2025 exRhs->ex_Lhs = ResolveExp(isg, sg2,
2028 flags | RESOLVE_AUTOCAST);
2030 case DOP_ARGS_STORAGE:
2031 case DOP_STACK_STORAGE:
2032 case DOP_GLOBAL_STORAGE:
2033 case DOP_GROUP_STORAGE:
2035 * Set type. The Rhs is a STRUCT_ID and does not require
2036 * a type to be assigned to it.
2038 * Return type is always an LVALUE, parent may adjust.
2040 exType = ADD_LVALUE(d->d_StorDecl.ed_Type);
2043 * Pull up global constants
2045 if (exToken == TOK_DOT &&
2046 d->d_Op == DOP_GLOBAL_STORAGE &&
2047 (d->d_ScopeFlags & SCOPE_CONSTANT) &&
2048 (exLhs->ex_Flags & EXF_RET_TYPE)) {
2049 exFlags |= EXF_PROBCONST;
2054 * XXX make sure this is only used in the lhs of a
2055 * structural reference. XXX
2057 * XXX what if we went through a TY_REFTO type? This type
2060 * collapse the exp node.
2062 exType = d->d_TypedefDecl.ed_Type;
2064 exFlags &= ~EXF_BINARY;
2068 * Do not collapse an import, we require more resolution.
2069 * e.g. import.<blah> will be collapsed, but 'import'
2072 if (exFlags & EXF_REQ_TYPE) {
2075 &d->d_ImportDecl.ed_SemGroup->sg_ClassList,
2076 d->d_ImportDecl.ed_SemGroup,
2078 exFlags |= EXF_RET_TYPE;
2084 * Do not collapse a class, we require more resolution.
2085 * e.g. class.<blah> will be collapsed, but 'class'
2088 if (exFlags & EXF_REQ_TYPE) {
2091 &d->d_ClassDecl.ed_SemGroup->sg_ClassList,
2092 d->d_ClassDecl.ed_Super,
2093 d->d_ClassDecl.ed_SemGroup,
2095 exFlags |= EXF_RET_TYPE;
2100 dassert_exp(exp, 0);
2103 if (d->d_Op == DOP_PROC) {
2104 if (d->d_ScopeFlags & SCOPE_PURE)
2106 } else if (exType->ty_SQFlags & SF_CONST) {
2109 } else if ((s = SpecialSemGroupGet(id)) != 0) {
2111 * Identifier not found, check for a special identifier.
2113 exRhs->ex_Token = TOK_SEMGRP_ID;
2114 exRhs->ex_Int32 = s;
2119 dassert(type->ty_Op == TY_PTRTO || type->ty_Op == TY_REFTO);
2120 /* NULL is not an lvalue */
2121 exType = DEL_LVALUE(type);
2122 exFlags |= EXF_NULL;
2125 dassert(type->ty_Op != TY_PTRTO && type->ty_Op != TY_REFTO);
2126 exType = &Int32Type;
2131 * typeof(self.__data[]) vs (cast)self.__data[]
2133 dassert(type->ty_Op != TY_PTRTO && type->ty_Op != TY_REFTO);
2134 dassert(exFlags & EXF_REQ_ARRAY);
2135 exFlags |= EXF_RET_ARRAY;
2136 if (s == SPECIAL_TYPE) {
2137 exFlags |= EXF_RET_TYPE;
2138 exType = &DynamicLValueType;
2139 } else if (exFlags & EXF_REQ_TYPE) {
2140 exFlags |= EXF_RET_TYPE;
2141 exType = &DynamicLValueType;
2146 * dynamic data must be cast
2148 dassert_exp(exp, 0);
2149 exType = &DynamicLValueType;
2152 case SPECIAL_VAR_COUNT:
2153 dassert(type->ty_Op != TY_PTRTO && type->ty_Op != TY_REFTO);
2154 exType = &Int32Type;
2155 sg->sg_Flags |= SGF_ABICALL;
2157 case SPECIAL_VAR_TYPE:
2158 case SPECIAL_VAR_DATA:
2160 * typeof(self.__vardata[]) vs (cast)self.__vardata[]
2162 dassert(type->ty_Op != TY_PTRTO && type->ty_Op != TY_REFTO);
2163 dassert(exFlags & EXF_REQ_ARRAY);
2164 exFlags |= EXF_RET_ARRAY;
2165 if (s == SPECIAL_TYPE) {
2166 exFlags |= EXF_RET_TYPE;
2167 exType = &DynamicLValueType;
2168 } else if (exFlags & EXF_REQ_TYPE) {
2169 exFlags |= EXF_RET_TYPE;
2170 exType = &DynamicLValueType;
2175 * dynamic data must be cast
2177 dassert_exp(exp, 0);
2178 exType = &DynamicLValueType;
2180 sg->sg_Flags |= SGF_ABICALL;
2182 case SPECIAL_TYPEID:
2183 exType = &Int32Type;
2185 case SPECIAL_TYPESTR:
2189 dassert_exp(exRhs, 0);
2194 * This is nasty, I admit. If we have a pointer or reference
2198 if (type->ty_Op == TY_REFTO) {
2199 type = type->ty_RefType.et_Type;
2203 if (type->ty_Op == TY_PTRTO) {
2204 type = type->ty_RawPtrType.et_Type;
2208 ExpFatalError(exRhs, eno);
2212 dassert_exp(exp, exType != NULL);
2216 * NOTE: unresolved identifiers should not have alias expression
2217 * sub-tree duplications attached to them. assert it.
2219 dassert_exp(exp, exLhs == NULL);
2223 * NOTE: LVALUE/RVALUE for elements and return type depends on the
2224 * operator. Operator functions normally self-optimize the cases at
2228 exp = resolveExpOper(isg, sg, exp, itype,
2229 flags & ~RESOLVE_AUTOCAST);
2233 * Indirect through an expression.
2235 * Return type is typically an LVALUE (if representing storage). Exp
2236 * parent might turn it off so run-time must test. Lhs may or may
2242 exLhs = ResolveExp(isg, sg, exLhs, NULL,
2243 flags & ~RESOLVE_AUTOCAST);
2244 type = exLhs->ex_Type;
2246 switch (type->ty_Op) {
2248 if ((exFlags & EXF_INDREF) == 0) {
2249 fprintf(stderr, "You cannot use '*' on a reference type\n");
2250 dassert_exp(exLhs, 0);
2252 exType = ADD_LVALUE(type->ty_RefType.et_Type);
2255 exType = ADD_LVALUE(type->ty_RawPtrType.et_Type);
2258 dassert_exp(exLhs, 0);
2265 * Take the address of an (LVALUE) expression. Returns an RVALUE.
2266 * Allow for a short-cut optimization which replaces the TOK_ADDR
2267 * sequence with its argument in the &ary[n] case.
2273 * Hint must 'always happen' since we may be modifying an
2274 * expression that will later be Dup'd.
2276 * It is sufficient to test EXF_ADDRUSED to determine if
2277 * SRSGET/SRSPUT is needed for the procedure.
2279 exLhs->ex_Flags |= EXF_REQ_ADDROF | EXF_ADDRUSED;
2280 exLhs = ResolveExp(isg, sg, exLhs, NULL,
2281 flags & ~RESOLVE_AUTOCAST);
2282 if (exLhs->ex_Flags & EXF_RET_ADDROF) {
2285 type = exLhs->ex_Type;
2286 dassert_exp(exLhs, type->ty_SQFlags & SF_LVALUE);
2287 exType = ResolveType(TypeToRawPtrType(type), NULL, 0);
2288 /* DEL_LVALUE() not needed here */
2294 * Array index, takes an RVALUE, returns an LVALUE.
2296 * Note: we have to convert the special __data[exp] case.
2298 * Note: ex_Flags hints must 'always happen' since we may be
2299 * modifying an expression that will later be Dup'd.
2301 exRhs = ResolveExp(isg, sg, exRhs, NULL,
2302 flags & ~RESOLVE_AUTOCAST);
2303 if (exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) {
2304 exRhs = resolveConstExp(isg, sg, exRhs,
2305 flags | RESOLVE_FAILOK);
2307 exLhs->ex_Flags |= EXF_REQ_ARRAY | (exFlags & EXF_REQ_TYPE);
2308 exLhs->ex_Flags |= EXF_ADDRUSED /* | (exFlags & EXF_REQ_ADDROF) */ ;
2309 exLhs->ex_AuxExp = exRhs;
2310 exLhs = ResolveExp(isg, sg, exLhs, itype,
2311 flags & ~RESOLVE_AUTOCAST);
2314 * If we are indexing an actual array we have to retain EXF_ADDRUSED
2315 * to prevent it from being cached in a register. Otherwise we are
2316 * indirecting through a pointer and not taking the address of the
2317 * pointer itself. (tests/cat.d uses gets() which is a good test of
2320 if (exLhs->ex_Type && exLhs->ex_Type->ty_Op != TY_ARYOF)
2321 exLhs->ex_Flags &= ~(EXF_ADDRUSED | EXF_REQ_ADDROF);
2323 if (MatchType(&IntegralType, exRhs->ex_Type) >=
2325 ExpPrintError(exRhs, TOK_ERR_EXPECTED_INTEGRAL_TYPE);
2326 dassert_exp(exp, 0);
2329 if (exLhs->ex_Flags & EXF_RET_ARRAY) {
2331 * __data and __vardata specials
2333 /* don't modify ex_Token, EXF_DUPEXP might be set */
2334 /* exp->ex_Token = TOK_ERR_EXP_REMOVED; */
2336 } else if (exFlags & EXF_REQ_ADDROF) {
2338 * &ary[i] optimization - allows us to create a bounded pointer
2339 * (returns an RVALUE).
2341 * XXX now we just return a raw pointer
2345 exFlags |= EXF_RET_ADDROF;
2347 dassert((exLhs->ex_Flags & EXF_RET_TYPE) == 0);
2349 exLhs->ex_AuxExp = NULL;
2350 type = exLhs->ex_Type;
2352 switch (type->ty_Op) {
2354 type = type->ty_AryType.et_Type;
2357 type = type->ty_RawPtrType.et_Type;
2360 /* Cannot take address of a reference type */
2361 dassert_exp(exp, 0);
2364 exType = ResolveType(TypeToRawPtrType(type), NULL, 0);
2365 /* returns an RVALUE */
2368 * Unoptimized array lookup, returns an lvalue
2372 dassert((exLhs->ex_Flags & EXF_RET_TYPE) == 0);
2374 exLhs->ex_AuxExp = NULL;
2375 type = exLhs->ex_Type;
2377 switch (type->ty_Op) {
2379 type = type->ty_AryType.et_Type;
2382 type = type->ty_RawPtrType.et_Type;
2386 "Cannot index a reference type\n");
2387 dassert_exp(exp, 0);
2390 exType = ADD_LVALUE(type);
2391 /* returns an LVALUE */
2395 dassert_exp(exp, 0); /* XXX */
2400 * XXX we should return a bounded pointer here.
2403 exFlags |= EXF_CONST;
2405 if ((exFlags2 & EX2F_ESCDONE) == 0) {
2408 exFlags2 |= EX2F_ESCDONE;
2409 str = StrTableEscapeQuotedString(exStr, strlen(exStr), 1);
2410 ReplaceStrTable(&exp->ex_Str, str);
2415 * Set EXF_PARSE_TYPE to make sure that ex_Type survives DupExp().
2417 * exp->u.uint32 is always set to the single-quoted result
2421 exFlags |= EXF_CONST | EXF_PARSE_TYPE;
2422 dassert(exType != NULL);
2426 * Integer and related type is already loaded into the exp
2429 exFlags |= EXF_CONST;
2430 dassert(exType != NULL);
2434 * Float and related type is already loaded into the exp
2437 exFlags |= EXF_CONST;
2438 dassert(exType != NULL);
2445 * The self identifier represents the current procedure's arguments.
2446 * A varargs procedure will actually be called with an extended
2447 * version of this type, but for resolution purposes we can use this
2450 * This is an LVALUE to support things like self.new() XXX.
2452 exType = ADD_LVALUE(resolveArgsType(sg, flags));
2456 * The '$' identifier represents the current procedure's return
2459 if (sg->sg_Flags & SGF_DIDRESULT)
2460 ExpFatalError(exp, TOK_ERR_RESULT_SEQUENCING);
2461 exType = ADD_LVALUE(resolveReturnType(sg, flags));
2466 * Lookup the identifier. The returned declaration could represent a
2467 * class, typedef, module, or storage, but for this case we only
2468 * allow storage or a constant. Since we are starting from our own
2469 * semantic group, visibility is initially ALL (private, library, and
2472 * The identifier might represent something at a higher scoping
2473 * layer. For example, a nested procedure accessing a variable in
2474 * the parent procedure or a method procedure in a class accessing an
2475 * element of the object.
2477 * It is also possible for the current execution scoping layer (sg)
2478 * to have a secondary contextual layer from which global constants
2479 * can be accessed. This is typically set when resolving procedure
2480 * arguments for procedures called through objects or types. Only
2481 * type globals can be accessed via this shortcut.
2483 * This returns an LVALUE if the id represents storage.
2487 int eno = TOK_ERR_ID_NOT_FOUND;
2492 * Special case 'super'. XXX TY_REFTO
2494 * Make an in-place change to the expression structure. 'super'
2495 * is actually 'this' with the EXF_SUPER flag set.
2497 if (exId == RUNEID_SUPER) {
2499 exFlags |= EXF_SUPER;
2501 ary[0] = exp->ex_Id;
2504 exDecl = FindDeclPath(&exp->ex_LexRef, isg, sg,
2506 FDC_NULL, &exVisibility,
2508 if (exDecl == NULL) {
2509 exDecl = FindDeclPathAltContext(
2510 &exp->ex_LexRef, isg, sg,
2512 FDC_NULL, &exVisibility,
2515 if (exDecl == NULL) {
2516 ExpPrintError(exp, eno);
2517 dassert_exp(exp, 0);
2521 * The EXF flag is set by TOK_ADDR, possibly propagated down via
2522 * TOK_DOT. Use this to flag that the stack context might be
2523 * used outside of its normal life. LValue scoped declarations
2524 * do not count because they have their own RefStor.
2526 * (This code is primarily responsible for causing SRSGET and
2527 * SRSPUT instructions to be emitted).
2529 if ((exFlags & EXF_ADDRUSED) &&
2530 (exDecl->d_Scope.s_Flags & SCOPE_LVALUE) == 0)
2532 exDecl->d_MyGroup->sg_Flags |= SGF_ADDRUSED;
2536 * We have to resolve the declaration here, we no longer have the
2537 * redundancy to resolve it elsewhere.
2540 if ((exDecl->d_Flags & DF_RESOLVING) == 0)
2541 ResolveDecl(exDecl, 0);
2546 * Try to delay resolving the procedure declaration (which will
2547 * resolve the procedure body). We cannot delay the resolution
2548 * if resolving a constant that the resolver needs immediately.
2550 if (flags & RESOLVE_CONSTEXP) {
2551 ResolveDecl(exDecl, 0);
2557 * Taking the address of content-locked storage is illegal.
2559 * If we are running through an LValueStor, UNTRACKED and UNLOCKED
2560 * apply to it and not its contents. Check to see if the contents
2563 if ((exFlags & EXF_REQ_ADDROF) &&
2564 (exDecl->d_Scope.s_Flags & (SCOPE_SOFT | SCOPE_HARD))) {
2565 Type *type = exDecl->d_StorDecl.ed_Type;
2566 if ((type->ty_Flags & TF_HASLVREF) &&
2567 type->ty_Op != TY_CLASS &&
2568 type->ty_Op != TY_ARYOF) {
2569 ExpPrintError(exp, TOK_ERR_ILLEGAL_ADDRLOCKED);
2570 dassert_exp(exp, 0);
2574 switch (exDecl->d_Op) {
2575 case DOP_ARGS_STORAGE:
2576 if (sg->sg_Flags & SGF_DIDRESULT)
2577 ExpFatalError(exp, TOK_ERR_RESULT_SEQUENCING);
2579 case DOP_STACK_STORAGE:
2580 case DOP_GLOBAL_STORAGE:
2581 case DOP_GROUP_STORAGE:
2583 * Storage identifiers are lvalues.
2585 * Try to delay this step, giving the language more flexibility
2586 * in avoiding resolver loops from interdependencies that can
2589 * We can't delay this step when resolving an expression that the
2590 * resolver needs an actual constant result for.
2592 exType = ADD_LVALUE(exDecl->d_StorDecl.ed_Type);
2593 if (exFlags & EXF_ADDRUSED)
2594 exDecl->d_Flags |= DF_ADDRUSED;
2595 if (exFlags & EXF_REQ_ADDROF)
2596 exDecl->d_Flags |= DF_ADDROF;
2597 if (exType->ty_SQFlags & SF_CONST)
2600 if (flags & RESOLVE_CONSTEXP) {
2601 Exp **asexpp = &exDecl->d_StorDecl.ed_AssExp;
2603 *asexpp = DupExp(sg, *asexpp);
2604 *asexpp = ResolveExp(isg, sg, *asexpp,
2606 flags | RESOLVE_AUTOCAST);
2607 *asexpp = SetDupExp(sg, *asexpp);
2614 * Aliases are rvalues (even if they could be lvalues).
2616 exType = DEL_LVALUE(exDecl->d_AliasDecl.ed_Type);
2617 exFlags |= EXF_ALIAS | EXF_UNARY;
2620 * NOTE: exLhs must be NULL if exp is unresolved. exp tree
2621 * duplications do not duplicate the alias's exLHS even though
2622 * UNARY is set. However, because we probably have not actually
2623 * duplicated exp yet, we have to clear the field in our pre-dup
2626 * NOTE: DupExp is absolutely required due to the alias's target
2627 * context being different for each consumer.
2629 if (exFlags & EXF_DUPEXP)
2631 dassert_exp(exp, exLhs == NULL);
2632 exLhs = DupExp(sg, exDecl->d_AliasDecl.ed_AssExp);
2633 exLhs = ResolveExp(isg, sg, exLhs, exType,
2634 flags | RESOLVE_AUTOCAST);
2637 * Inherit EXF_NULL (NULL pointer special) through the alias,
2638 * otherwise it will not be assignable to arbitrary pointers.
2640 exFlags |= exLhs->ex_Flags & EXF_NULL;
2645 * A procedural identifier.
2647 * Note: procedural pointers cannot be changed so they are not
2650 dassert_exp(exp, (exFlags & EXF_REQ_PROC));
2651 exType = exDecl->d_ProcDecl.ed_Type;
2652 if (exDecl->d_ScopeFlags & SCOPE_PURE)
2656 if (exFlags & EXF_REQ_TYPE) {
2657 exType = exDecl->d_TypedefDecl.ed_Type;
2658 exFlags |= EXF_RET_TYPE;
2661 dassert_exp(exp, 0);
2664 if (exFlags & EXF_REQ_TYPE) {
2667 &exDecl->d_ClassDecl.ed_SemGroup->sg_ClassList,
2668 exDecl->d_ClassDecl.ed_Super,
2669 exDecl->d_ClassDecl.ed_SemGroup,
2671 exFlags |= EXF_RET_TYPE;
2674 dassert_exp(exp, 0);
2677 if (exFlags & EXF_REQ_TYPE) {
2680 &exDecl->d_ImportDecl.ed_SemGroup->sg_ClassList,
2681 exDecl->d_ImportDecl.ed_SemGroup,
2683 exFlags |= EXF_RET_TYPE;
2686 dassert_exp(exp, 0);
2689 dassert_exp(exp, 0);
2694 * NOTE: BoolType global implies an rvalue.
2697 exLhs = ResolveExp(isg, sg, exLhs, &BoolType,
2698 flags | RESOLVE_AUTOCAST);
2701 if (exFlags & EXF_REQ_TYPE) {
2702 ResolveType(exType, NULL, 0);
2703 exFlags |= EXF_RET_TYPE;
2705 dassert_exp(exp, 0);
2710 * User cast (or maybe the parser inserted it). Try to resolve the
2711 * expression with the requested type hint but tell ResolveExp() not
2712 * to force the cast.
2714 * Then check the result. If ResolveExp() was not able to optimize
2715 * the requested cast then resolve the cast.
2717 * If the types are compatible we still keep the TOK_CAST node in
2718 * place for the moment. XXX we really need to formalize how
2719 * ex_Type is set Similar vs Exact.
2721 * NOTE: Cast results are always an RVALUE. XXX validate here.
2724 if ((exFlags & EXF_PARSE_TYPE) == 0) {
2725 exRhs->ex_Flags |= EXF_REQ_TYPE;
2726 exRhs = ResolveExp(isg, sg, exRhs, NULL,
2727 flags & ~RESOLVE_AUTOCAST);
2728 exType = exRhs->ex_Type;
2730 exLhs = ResolveExp(isg, sg, exLhs, exType,
2731 flags & ~RESOLVE_AUTOCAST);
2732 if (SimilarType(exType, exLhs->ex_Type) == 0) {
2733 exp = resolveExpCast(isg, sg, exLhs, exType, flags);
2736 /* propagate NULL flag to allow cast to any pointer type */
2737 if (exLhs->ex_Flags & EXF_NULL)
2738 printf("LHS NULL\n");
2739 exp->ex_Flags |= exLhs->ex_Flags & EXF_NULL;
2744 * Calls require the RHS to be a compound expression representing the
2745 * procedure arguments.
2747 * XXX deal with pointer-to-function versus function XXX the lhs must
2748 * at the moment resolve to the procedure itself.
2750 * In regards to procedure pointers, the declaration will require a
2751 * pointer to the procedure's statement body. XXX this pointer can
2752 * be the physical storage associated with the lhs data but thus
2753 * requires the type to be a pointer. We do not support the 'C'
2754 * (*ptr_to_func)(...) form. You have to use ptr_to_func(...).
2758 Type *atype; /* type for alt context */
2759 SemGroup *save_asg; /* save old alt context */
2761 dassert_exp(exRhs, exRhs->ex_Token == TOK_COMPOUND);
2764 * Note: ex_Flags hints must 'always happen' since we may be
2765 * modifying an expression that will later be Dup'd.
2767 exLhs->ex_Flags |= EXF_REQ_PROC;
2768 exLhs->ex_Flags |= EXF_ADDRUSED;
2769 exLhs = ResolveExp(isg, sg, exLhs, NULL,
2770 flags & ~RESOLVE_AUTOCAST);
2771 ltype = exLhs->ex_Type;
2772 dassert_exp(exLhs, ltype != NULL &&
2773 ltype->ty_Op == TY_PROC);
2774 dassert_exp(exLhs, exLhs->ex_Decl != NULL);
2775 dassert_exp(exRhs, exRhs->ex_Token == TOK_COMPOUND);
2778 * If the lhs type indicates a method procedure, then it's lhs
2779 * is the object we wish to pass as the first argument to the
2780 * method. We dup the lhs exp. For a STRIND TY_PTRTO
2781 * method call we indirect the element and convert it to a
2782 * TOK_DOT lvalue argument of the underlying object.
2784 * A method call via a reference object is a very weird case.
2786 * Since the method called through an object winds up being a
2787 * method tailored for that object, and we are calling through a
2788 * reference to an object, the actual method will be looked up at
2789 * run time and will match the object. Thus we can safely
2790 * indirect through the reference object for this one case. Since
2791 * (*ref_obj) is not normally allowed this will be special-cased
2792 * at compile-time or run-time.
2794 * Note that this occurs before we evaluate the compound
2795 * expression on the right hand side. Also note that since the
2796 * resolver can be called multiple times on a shared expression,
2797 * we have to be careful to shift the arguments around only once.
2799 if ((ltype->ty_SQFlags & SF_METHOD) &&
2800 (exRhs->ex_Flags & EXF_CALL_CONV) == 0)
2804 exRhs->ex_Flags |= EXF_CALL_CONV;
2806 switch (exLhs->ex_Token) {
2807 case TOK_STRIND: /* indirect */
2809 * Calling through a ref or pointer
2815 * blah e.g.func_id (resolved)
2819 * NOTE: Do not set EXF_RESOLVED, we need to call the
2820 * resolver to properly propagate ADDRUSED.
2822 lhs = exLhs->ex_Lhs;
2823 methodCheckThisId(ltype, lhs);
2825 if (methodProcThisIsPointer(ltype, lhs)) {
2827 } else if (lhs->ex_Type->ty_Op == TY_PTRTO) {
2828 Exp *nexp = AllocExp(NULL);
2831 nexp->ex_Token = TOK_PTRIND;
2832 nexp->ex_Type = ADD_LVALUE(
2833 lhs->ex_Type->ty_RawPtrType.et_Type);
2834 nexp->ex_Flags |= EXF_UNARY;
2835 LexDupRef(&lhs->ex_LexRef, &nexp->ex_LexRef);
2836 exLhs->ex_Token = TOK_DOT;
2838 } else if (lhs->ex_Type->ty_Op == TY_REFTO) {
2839 Exp *nexp = AllocExp(NULL);
2842 nexp->ex_Token = TOK_PTRIND;
2843 nexp->ex_Type = ADD_LVALUE(
2844 lhs->ex_Type->ty_RefType.et_Type);
2845 nexp->ex_Flags |= EXF_UNARY | EXF_INDREF;
2846 LexDupRef(&lhs->ex_LexRef, &nexp->ex_LexRef);
2849 dassert_exp(lhs, 0);
2855 * Calling via '.', e.g. stdin->fs.importdesc().
2856 * Take the address of stdin->fs, which will give
2857 * us a pointer rather than a reference. It is not
2858 * possible to obtain a reference from an embedded type.
2859 * This will trigger resolution of the pointer *this
2860 * of the method rather than the @this version.
2862 * If this is a pointer or reference, it will match the
2863 * built-in methods for PointerType and ReferenceType.
2865 * Pass directly as an lvalue. If this is a pointer or
2866 * reference only the builtin methods for the Pointer
2867 * or Reference class are possible. These methods
2868 * require a content-locked reference.
2870 lhs = exLhs->ex_Lhs;
2871 if (lhs->ex_Type->ty_Op == TY_CLASS) {
2872 Exp *nexp = AllocExp(NULL);
2875 nexp->ex_Token = TOK_ADDR;
2876 nexp->ex_Type = TypeToRawPtrType(lhs->ex_Type);
2877 nexp->ex_Flags |= EXF_UNARY;
2878 LexDupRef(&lhs->ex_LexRef, &nexp->ex_LexRef);
2881 if (lhs->ex_Type->ty_Op != TY_PTRTO &&
2882 lhs->ex_Type->ty_Op != TY_REFTO) {
2887 dassert_exp(exp, 0);
2893 * Make sure atype survives DupExp().
2895 lhs->ex_Flags |= EXF_PARSE_TYPE;
2896 atype = lhs->ex_Type;
2899 * Leave the lhs intact, but set the duplication flag in case
2900 * things get nasty later (they may have already, actually).
2902 exLhs->ex_Lhs = SetDupExp(sg, exLhs->ex_Lhs);
2903 lhs->ex_Next = exRhs->ex_Lhs;
2904 exRhs->ex_Lhs = lhs;
2905 } else if (ltype->ty_SQFlags & SF_METHOD) {
2908 lhs = exRhs->ex_Lhs;
2909 atype = lhs->ex_Type;
2915 * Try to set an alternative search context during resolution of
2916 * the procedure arguments. This context is only searched if an
2917 * identifier cannot be found through normal means so local
2918 * variables and such will override it as the programmer should
2919 * expect. Since the local semantic stack is under the
2920 * programmer's control, unexpected collisions should either not
2921 * occur or be easily fixed.
2924 switch (atype->ty_Op) {
2926 atype = atype->ty_RefType.et_Type;
2929 atype = atype->ty_RawPtrType.et_Type;
2932 if (atype->ty_Op != TY_CLASS)
2936 save_asg = sg->sg_AltContext;
2937 sg->sg_AltContext = atype->ty_ClassType.et_SemGroup;
2943 * Resolve the right hand side, which are the procedure arguments
2944 * as a compound type. This can get tricky. XXX
2946 * NOTE: We inherit the SF_LVALUE flag from the return type.
2947 * Parent might turn it off.
2949 /* d = exLhs->ex_Decl; */
2950 exRhs = ResolveExp(isg, sg, exRhs,
2951 ltype->ty_ProcType.et_ArgsType,
2952 flags | RESOLVE_AUTOCAST);
2953 exType = ltype->ty_ProcType.et_RetType;
2956 * Restore AltContext after resolving rhs.
2958 sg->sg_AltContext = save_asg;
2959 } else if ((exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) &&
2960 (exLhs->ex_Decl->d_ScopeFlags & SCOPE_PURE)) {
2962 * atype NULL (not method call, which requires an object),
2963 * arguments can become constants, pure function, so result
2964 * can become a constant.
2966 exFlags |= EXF_PROBCONST;
2970 * Additional work to inline the procedure
2972 resolveDynamicProcedure(isg, sg, exp, flags);
2973 resolveProcedureInline(isg, sg, exp, flags);
2976 case TOK_INLINE_CALL:
2978 * An inlined call has already resolved via TOK_CALL. It will not be
2979 * a constant, and any argument modifications have already been
2985 Type *atype; /* type for alt context */
2986 SemGroup *save_asg; /* save old alt context */
2988 exLhs->ex_Flags |= EXF_REQ_PROC;
2989 exLhs->ex_Flags |= EXF_ADDRUSED;
2990 exLhs = ResolveExp(isg, sg, exLhs, NULL,
2991 flags & ~RESOLVE_AUTOCAST);
2993 ltype = exLhs->ex_Type;
2997 * Try to set an alternative search context during resolution of
2998 * the procedure arguments. This context is only searched if an
2999 * identifier cannot be found through normal means so local
3000 * variables and such will override it as the programmer should
3001 * expect. Since the local semantic stack is under the
3002 * programmer's control, unexpected collisions should either not
3003 * occur or be easily fixed.
3005 if (ltype->ty_SQFlags & SF_METHOD) {
3008 rhs = exRhs->ex_Lhs;
3009 atype = rhs->ex_Type;
3014 switch (atype->ty_Op) {
3016 atype = atype->ty_RefType.et_Type;
3019 atype = atype->ty_RawPtrType.et_Type;
3022 if (atype->ty_Op != TY_CLASS)
3026 save_asg = sg->sg_AltContext;
3027 sg->sg_AltContext = atype->ty_ClassType.et_SemGroup;
3031 exRhs = ResolveExp(isg, sg, exRhs,
3032 ltype->ty_ProcType.et_ArgsType,
3033 flags | RESOLVE_AUTOCAST);
3036 sg->sg_AltContext = save_asg;
3039 exType = ltype->ty_ProcType.et_RetType;
3040 ResolveStmt(d->d_ImportSemGroup, exp->ex_AuxStmt, flags);
3045 * (NOTE EARLY RETURN)
3047 * A compound expression should always be an RVALUE, but might
3048 * contain LVALUEs (XXX).
3051 exp = resolveCompoundExp(isg, sg, exp, itype, flags);
3056 * (NOTE EARLY RETURN)
3059 exp = resolveBracketedExp(isg, sg, exp, itype, flags);
3064 * The caller must be able to handle a type return when typeof() is
3067 dassert_exp(exp, exFlags & EXF_REQ_TYPE);
3072 * If an expression was supplied, convert it to a type.
3074 * NOTE: ex_Flags hints must 'always happen' since we may be
3075 * modifying an expression that will later be Dup'd.
3078 if ((exFlags & EXF_RET_TYPE) == 0) {
3079 dassert(exLhs != NULL);
3080 exLhs->ex_Flags |= EXF_REQ_TYPE;
3081 exLhs = ResolveExp(isg, sg, exLhs, NULL,
3082 flags & ~RESOLVE_AUTOCAST);
3083 exType = exLhs->ex_Type;
3085 /* do not clear EXF_UNARY, messes up tmp exp storage */
3086 /* exFlags &= ~EXF_UNARY; */
3088 exFlags |= EXF_RET_TYPE;
3089 /* XXX delete the lhs */
3091 ResolveType(exType, NULL, 0);
3095 * Create appropriate integer constants for sizeof() and
3100 exp->ex_Token = TOK_INTEGER;
3101 exp->ex_Tmp.ts_USize = exType->ty_Bytes;
3102 exType = &USizeType;
3103 exFlags &= ~EXF_RET_TYPE;
3104 exFlags |= EXF_CONST;
3107 dassert_exp(exp, (exType->ty_Flags & TF_RESOLVING) == 0);
3108 dassert_exp(exp, exType->ty_Op == TY_ARYOF);
3109 if (exType->ty_AryType.et_Type->ty_Bytes) {
3110 exp->ex_Tmp.ts_USize = exType->ty_Bytes /
3111 exType->ty_AryType.et_Type->ty_Bytes;
3113 exp->ex_Tmp.ts_USize = 0;
3115 exp->ex_Token = TOK_INTEGER;
3116 exType = &USizeType;
3117 exFlags &= ~EXF_RET_TYPE;
3118 exFlags |= EXF_CONST;
3122 /* type is returned */
3127 dassert_exp(exp, 0);
3132 * Ensure that the cast target type is resolved.
3135 ResolveType(exType, NULL, 0);
3136 /* XXX exType was ex_Type */
3139 * If the type hint did not succeed we may have to cast the
3140 * expression to the requested type. Note that if the itype was set
3141 * as part of an array optimization request which could not be
3142 * handled, we must ignore itype.
3144 * Note that SimilarType() will allow exp->ex_Type to be a var-args
3145 * TY_ARGS, and since the original Rhs of a call is set to the
3146 * procedure arguments type, VarType.et_Type should match exactly.
3149 (exFlags & (EXF_REQ_ARRAY | EXF_RET_ARRAY)) != EXF_REQ_ARRAY)
3151 if ((itype->ty_Flags & TF_RESOLVED) == 0)
3152 ResolveType(itype, NULL, 0);
3153 if ((itype->ty_SQFlags & SF_LVALUE) &&
3154 (exType->ty_SQFlags & SF_LVALUE) == 0
3157 fprintf(stderr, "Exp must be an lvalue here\n");
3158 dassert_exp(exp, 0);
3161 if (!SimilarType(itype, exType) &&
3162 (flags & RESOLVE_AUTOCAST)) {
3163 if (exp->ex_Flags & EXF_DUPEXP) {
3164 Exp *nexp = AllocExp(NULL);
3166 nexp->ex_Tmp = exp->ex_Tmp;
3167 LexDupRef(&exp->ex_LexRef, &nexp->ex_LexRef);
3169 exFlags &= ~EXF_DUPEXP;
3170 /* exp = DupExp(sg, exp); */
3172 exFlags |= EXF_RESOLVED;
3173 exp = resolveExpCast(isg, sg, exp, itype,
3180 * Generic constant evaluation flag. Note that EXF_PROBCONST could also
3181 * be set above (TOK_CALL).
3184 (exLhs == NULL || (exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST))) &&
3185 (exRhs == NULL || (exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)))) {
3186 exp->ex_Flags |= EXF_PROBCONST;
3188 exp->ex_Flags |= EXF_RESOLVED;
3194 * Resolve an expression for which the resolver needs the result immediately.
/*
 * resolveConstExp() - force-resolve `exp` and evaluate it to a constant.
 *
 * Resolves the expression (AUTOCAST stripped), assigns temporary
 * alignment/storage from offset 0, then executes it through the
 * interpreter (ex_Run) under a CTF_RESOLVING context to collapse it to
 * a constant.  On exit the original ex_TmpOffset/flags are restored so
 * normal code generation is unaffected.
 *
 * NOTE(review): this view of the file is a partial extraction; several
 * original lines (braces, blank lines) are not visible here, so the
 * comments below annotate only what the visible code establishes.
 */
3197 resolveConstExp(SemGroup *isg, SemGroup *sg, Exp *exp, int flags)
3199 urunesize_t tmpbytes;
3200 urunesize_t tmpalign;
3201 srunesize_t ooffset;
/* constant evaluation must see the raw value; never auto-cast */
3204 flags &= ~RESOLVE_AUTOCAST;
3206 if ((exp->ex_Flags & EXF_RESOLVED) == 0) {
3207 exp = ResolveExp(isg, sg, exp, NULL, flags);
3210 /* XXX can't do this atm, it messes up ARYSIZE resolving */
3212 exp->ex_Flags &= ~EXF_RESOLVED;
3216 if ((exp->ex_Flags & EXF_RESOLVED) == 0) {
3217 printf("early resolve failed\n");
/* save flags/offset so temporary storage can be restored on the way out */
3221 oflags = exp->ex_Flags;
3222 ooffset = exp->ex_TmpOffset;
/* [re]-resolve alignment and storage from offset 0 so we can execute it */
3225 resolveExpAlign(exp, &tmpalign, RESOLVE_CONSTEXP);
3226 resolveStorageExp(exp, 0, &tmpbytes);
/* the expression must at least be potentially constant to proceed */
3228 if ((exp->ex_Flags & (EXF_CONST | EXF_PROBCONST)) == 0) {
3229 if (flags & RESOLVE_FAILOK)
3231 ExpPrintError(exp, TOK_ERR_EXPECTED_INTEGRER_CONST);
3232 dassert_exp(exp, 0);
3236 * Special interpreter execution to resolve the expression.
3241 char *dynamic_base; /* dynamic_base or NULL */
3244 bzero(&ct, offsetof(RunContext, ct_TmpCtxObjInfo));
3245 ct.ct_Flags |= CTF_RESOLVING;
3248 * NOTE: minimum alignment for posix_memalign() is sizeof(void *).
3250 align = sg->sg_TmpAlignMask + 1;
3251 if (align < sizeof(void *)) /* posix_memalign requirement */
3252 align = sizeof(void *);
/*
 * Small results execute in the embedded context buffer (ct.u);
 * larger ones get an aligned heap allocation with the ObjectInfo
 * placed immediately below the object.
 */
3254 dynamic_base = NULL;
3255 if (sg->sg_TmpBytes <= sizeof(ct.u) && align <= sizeof(float128_t)) {
3256 ct.ct_CtxObject = &ct.u.dummyobj;
3257 info = &ct.ct_TmpCtxObjInfo;
3261 if (align < sizeof(float128_t))
3262 align = sizeof(float128_t);
3263 if ((extra = OINFO_ALIGNED_SIZE) < align)
3265 dassert(extra >= sizeof(ObjectInfo));
/*
 * NOTE(review): posix_memalign()'s return value is not checked; on
 * allocation failure dynamic_base remains NULL and the pointer
 * arithmetic below would fault.  Consider asserting the return is 0.
 */
3266 posix_memalign((void *)&dynamic_base, align,
3267 sg->sg_TmpBytes + extra);
3268 ct.ct_CtxObject = (void *)(dynamic_base + extra);
3269 info = (void *)(dynamic_base + extra - sizeof(*info));
3271 ct.ct_TmpData = (void *)ct.ct_CtxObject;
3272 ct.ct_TmpBytes = sg->sg_TmpBytes;
/* run the interpreter on the expression to produce the constant */
3274 initObjectInfo(info, &VoidType, RSOP_TMPSPACE);
3275 exp->ex_Run(&ct, &data, exp);
/* execution must have collapsed the expression to a true constant */
3277 if ((exp->ex_Flags & EXF_CONST) == 0) {
3278 ExpPrintError(exp, TOK_ERR_EXPECTED_INTEGRER_CONST);
3279 dassert_exp(exp, 0);
3282 invalObjectInfo(info, dynamic_base);
3286 * exp is now a constant, restore the original ex_TmpOffset for normal
3287 * execution/operation (the storage may be needed for large constants).
3289 if (oflags & EXF_TMPRESOLVED) {
3290 exp->ex_TmpOffset = ooffset;
3291 /* resolveStorageExp(exp, &tmpbytes); */
3293 exp->ex_TmpOffset = -1;
3294 exp->ex_Flags &= ~EXF_TMPRESOLVED;
3296 resolveExpAlign(exp, &tmpalign, RESOLVE_CLEAN);
/*
 * resolveConstExpBool() - like resolveConstExp(), but evaluate the
 * expression to a boolean and copy the result into *ts (ts_Bool).
 *
 * Unlike resolveConstExp() this variant has no RESOLVE_FAILOK path:
 * a non-constant expression is a fatal error here.
 *
 * NOTE(review): partial extraction; some original lines (braces, blank
 * lines) are not visible in this view.
 */
3303 resolveConstExpBool(SemGroup *isg, SemGroup *sg, Exp *exp, int flags,
3306 urunesize_t tmpbytes;
3307 urunesize_t tmpalign;
3308 srunesize_t ooffset;
/* constant evaluation must see the raw value; never auto-cast */
3311 flags &= ~RESOLVE_AUTOCAST;
3313 if ((exp->ex_Flags & EXF_RESOLVED) == 0) {
3314 exp = ResolveExp(isg, sg, exp, NULL, flags);
3318 * [re]-resolve the storage from 0 so we can execute the expression.
3320 oflags = exp->ex_Flags;
3321 ooffset = exp->ex_TmpOffset;
3324 resolveExpAlign(exp, &tmpalign, RESOLVE_CONSTEXP);
3325 resolveStorageExp(exp, 0, &tmpbytes);
/* must at least be potentially constant */
3326 if ((exp->ex_Flags & (EXF_CONST | EXF_PROBCONST)) == 0) {
3327 ExpPrintError(exp, TOK_ERR_EXPECTED_INTEGRER_CONST);
3328 dassert_exp(exp, 0);
3332 * Special interpreter execution to resolve the expression.
3340 bzero(&ct, offsetof(RunContext, ct_TmpCtxObjInfo));
3341 ct.ct_Flags |= CTF_RESOLVING;
3344 * NOTE: minimum alignment for posix_memalign() is sizeof(void *).
/*
 * Small results execute in the embedded context buffer (ct.u); larger
 * ones get an aligned heap allocation, ObjectInfo just below the object.
 * Here the sizes come from the expression (tmpbytes/tmpalign) rather
 * than from the SemGroup as in resolveConstExp().
 */
3346 dynamic_base = NULL;
3347 if (tmpbytes <= sizeof(ct.u) && tmpalign <= sizeof(float128_t)) {
3348 ct.ct_CtxObject = &ct.u.dummyobj;
3349 info = &ct.ct_TmpCtxObjInfo;
3353 if (tmpalign < sizeof(float128_t))
3354 tmpalign = sizeof(float128_t);
3355 if ((extra = OINFO_ALIGNED_SIZE) < tmpalign)
3357 dassert(extra >= sizeof(ObjectInfo));
/* NOTE(review): posix_memalign() return value unchecked (see resolveConstExp) */
3358 posix_memalign((void *)&dynamic_base, tmpalign, tmpbytes + extra);
3359 ct.ct_CtxObject = (void *)(dynamic_base + extra);
3360 info = (void *)(dynamic_base + extra - sizeof(*info));
3362 ct.ct_TmpData = (void *)ct.ct_CtxObject;
3363 ct.ct_TmpBytes = tmpbytes;
/* run the interpreter; result must collapse to a true constant */
3365 initObjectInfo(info, &VoidType, RSOP_TMPSPACE);
3366 exp->ex_Run(&ct, &data, exp);
3369 if ((exp->ex_Flags & EXF_CONST) == 0) {
3370 ExpPrintError(exp, TOK_ERR_EXPECTED_INTEGRER_CONST);
3371 dassert_exp(exp, 0);
/* hand the boolean result back to the caller via *ts */
3373 ts->ts_Bool = rts->ts_Bool;
3374 invalObjectInfo(info, dynamic_base);
3378 * exp is now a constant, restore the original ex_TmpOffset for normal
3379 * execution/operation (the storage may be needed for large constants).
3381 if (oflags & EXF_TMPRESOLVED) {
3382 exp->ex_TmpOffset = ooffset;
3384 resolveStorageExp(exp, exp->ex_TmpOffset, &tmpbytes);
3386 exp->ex_TmpOffset = -1;
3387 exp->ex_Flags &= ~EXF_TMPRESOLVED;
3389 resolveExpAlign(exp, &tmpalign, RESOLVE_CLEAN);
3395 * Extract constant from already-constant-resolved expression.
3396 * resolveConstExp() must have previously been called on exp.
3398 * Expression must have already been constant-optimized, meaning that we
3399 * should be able to execute it without a context to access the cached
3400 * results in exp->u.
3402 * (This can also be called by the generator)
/*
 * Returns the integer constant widened to 64 bits.  The value is read
 * from the cached result at the width given by ty_Bytes, using an
 * unsigned or signed load according to TF_ISUNSIGNED, so sign extension
 * is applied only for signed types.  Unsupported widths assert.
 */
3405 resolveGetConstExpInt64(Exp *exp)
3410 dassert_exp(exp, (exp->ex_Flags & EXF_CONST));
/* NULL context: constant-optimized expressions run without a RunContext */
3411 exp->ex_Run(NULL, &data, exp);
3413 if (exp->ex_Type->ty_Flags & TF_ISUNSIGNED) {
3414 switch (exp->ex_Type->ty_Bytes) {
3416 value = *(uint8_t *) data.data;
3419 value = *(uint16_t *) data.data;
3422 value = *(uint32_t *) data.data;
3425 value = *(uint64_t *) data.data;
3429 dassert_exp(exp, 0);
3433 switch (exp->ex_Type->ty_Bytes) {
3435 value = *(int8_t *) data.data;
3438 value = *(int16_t *) data.data;
3441 value = *(int32_t *) data.data;
3444 value = *(int64_t *) data.data;
3448 dassert_exp(exp, 0);
/*
 * resolveGetConstExpFloat128() - extract a floating point constant from an
 * already-constant-resolved expression, widened to float128_t.
 *
 * Counterpart to resolveGetConstExpInt64(); also accepts a raw TOK_FLOAT
 * literal.  The value is loaded at the width given by ty_Bytes;
 * unsupported widths assert.
 */
3456 resolveGetConstExpFloat128(Exp *exp)
3461 dassert_exp(exp, exp->ex_Token == TOK_FLOAT ||
3462 (exp->ex_Flags & EXF_CONST));
/* NULL context: constant-optimized expressions run without a RunContext */
3463 exp->ex_Run(NULL, &data, exp);
3465 switch (exp->ex_Type->ty_Bytes) {
3467 value = (float128_t) *(float32_t *) data.data;
3470 value = (float128_t) *(float64_t *) data.data;
3473 value = *(float128_t *) data.data;
3477 dassert_exp(exp, 0);
3484 * resolveCompoundExp() - resolve a compound expression (called from
3485 * ResolveExp() and resolveExpOper()).
3487 * Resolve a compound expression. Compound expressions require a compound
3488 * type to normalize against. This will work for direct assignments, return
3489 * values, casts, and procedure arguments only.
3491 * NOTE: We can't use itype if EXF_REQ_ARRAY is specified because its hinting
3492 * for the array optimization case, which we cannot do.
3494 * Compound expressions may be used in conjuction with types reprsenting
3495 * classes, compound types, and procedure arguments. The compound expression
3496 * may contain subclasses of the superclasses expected by itype. This is
3497 * only allowed if the procedure's body has not yet been generated (for
3498 * example, a method call in a subclass).
3500 * Partially resolved operators are typically converted into procedure calls
3501 * and method calls are also partially resolved, so some elements may already
3504 * XXX named initialization, missing elements (structural initialization),
3505 * and so forth needs to be dealt with.
/*
 * NOTE(review): this view of the file is a partial extraction; a number
 * of original lines (braces, case labels, blank lines) are not visible,
 * so the inline comments annotate only what the visible code shows.
 */
3508 resolveCompoundExp(SemGroup *isg, SemGroup *sg, Exp *exp,
3509 Type *itype, int flags)
3520 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
3523 * Expression dup()ing
3525 if (exp->ex_Flags & EXF_DUPEXP) {
3528 fprintf(stderr, "DUPEXPC %d\n", ++count);
3530 exp = DupExp(sg, exp);
/* adopt the hinted type unless hinting the array-optimization case */
3533 if (itype && (exp->ex_Flags & EXF_REQ_ARRAY) == 0)
3534 exp->ex_Type = itype;
3537 * If we don't have a SemGroup to normalize against, XXX how should we
3538 * normalize the compound expression?
3540 if (exp->ex_Type == NULL) {
3541 dassert_exp(exp, 0);
3545 * Normalize the compound expression based on the argument types expected
3546 * by the procedure. We have to resolve the type before we start the
3547 * scan in order to ensure that d_Offset is properly assigned.
3549 * Use the declarations found in the compound type semantic group to
3550 * coerce the procedure arguments to generate the correct compound type.
3551 * Note that ResolveExp() recursion must still use the SemGroup that was
3554 * XXX deal with defaults and pre-resolved arguments. XXX
3556 type = ResolveType(exp->ex_Type, NULL, 0);
/* pick the SemGroup holding the element declarations for this type op */
3558 switch (type->ty_Op) {
3560 sg2 = type->ty_ArgsType.et_SemGroup;
3563 sg2 = type->ty_VarType.et_SemGroup;
3566 sg2 = type->ty_CompType.et_SemGroup;
3569 sg2 = type->ty_ClassType.et_SemGroup;
3572 dassert_exp(exp, 0);
3573 sg2 = NULL; /* NOT REACHED */
3576 pscan = &exp->ex_Lhs;
3579 * Scan the compound expression and match it up against the compound
/* walk elements (scan) in parallel with declarations (d) in sg2 */
3582 d = RUNE_FIRST(&sg2->sg_DeclList);
3583 while ((scan = *pscan) != NULL) {
3584 if (scan->ex_ArgId) {
3586 * Named argument, find it
3588 * (Overloading not allowed)
3590 int eno = TOK_ERR_ID_NOT_FOUND;
3593 nd = FindDeclId(sg2, scan->ex_ArgId, &eno);
3595 ExpFatalError(scan, eno);
3600 * XXX for now, punt on setting EXF_PROBCONST if the named
3601 * argument skips a declaration.
3603 if (nd != d && (d == NULL || nd != RUNE_NEXT(d, d_Node))) {
3609 * Unnamed argument, run through sequentially. Skip any
3610 * non-storage or global storage.
3612 while (d && d->d_Op != DOP_ARGS_STORAGE &&
3613 d->d_Op != DOP_STACK_STORAGE &&
3614 d->d_Op != DOP_GROUP_STORAGE
3616 d = RUNE_NEXT(d, d_Node);
3620 * Ran out of storage declarations. If this is a var-args
3621 * SemGroup then we actually create a new SemGroup (and
3622 * eventually a new type) to represent it.
3624 * We then extend the varargs SemGroup. This isn't pretty.
3628 (sg2->sg_Flags & SGF_VARARGS)) {
3629 sg2 = DupSemGroup(sg2->sg_Parent, NULL, sg2, 1);
3631 ResolveSemGroup(sg3, 0);
3638 "Too many arguments in "
3640 dassert_exp(scan, 0);
3646 * Unlink the expression from the compound list temporarily so we can
3647 * safely resolve it. Either cast the expression to the compound
3648 * element, or create a compound element (e.g. varargs call) to match
3651 * Due to the resolver moving things around, the elements of a
3652 * compound expression are sometimes resolved multiple times.
3654 *pscan = scan->ex_Next;
3655 scan->ex_Next = NULL;
3658 Type *dtype = d->d_StorDecl.ed_Type;
/*
 * Do not auto-cast into a generic lvalue Pointer/Reference slot;
 * the argument's own type must be preserved there.
 */
3664 if ((SimilarType(dtype, &PointerType) ||
3665 SimilarType(dtype, &ReferenceType)) &&
3666 (dtype->ty_SQFlags & SF_LVALUE) == SF_LVALUE)
3669 sflags = flags & ~RESOLVE_AUTOCAST;
3671 sflags = flags | RESOLVE_AUTOCAST;
3675 * LValueStor needs a RS, set ADDRUSED to make sure its available
3678 if (d->d_ScopeFlags & SCOPE_LVALUE)
3679 scan->ex_Flags |= EXF_ADDRUSED;
3681 if ((scan->ex_Flags & EXF_RESOLVED) == 0) {
3682 scan = ResolveExp(isg, sg, scan, dtype, sflags);
3685 * Cast the argument (scan) to the expected (dtype).
3687 * Since we have already resolved the expression we need to
3688 * do the same sanity checking that it would do to cast.
3690 * NOTE! Do NOT insert a cast when the target type is
3691 * lvalue void * or lvalue void @. Otherwise the
3692 * lv_Type loaded into the LValueStor will be incorrect
3693 * for operations, e.g. stdin.new()
3695 dassert_exp(scan, (dtype->ty_SQFlags & SF_LVALUE) == 0 ||
3696 (scan->ex_Type->ty_SQFlags & SF_LVALUE));
3698 if ((dtype->ty_SQFlags & SF_LVALUE) &&
3699 (SimilarType(&VoidPtrType, scan->ex_Type) &&
3700 SimilarType(&VoidPtrType, dtype)))
3703 } else if ((dtype->ty_SQFlags & SF_LVALUE) &&
3704 (SimilarType(&VoidRefType, scan->ex_Type) ||
3705 SimilarType(&VoidRefType, dtype)))
3708 } else if (!SimilarType(dtype, scan->ex_Type)) {
3713 printf("CAST ARGUMENT %016jx ", d->d_Id);
3714 printf("FROM %s ", TypeToStr(scan->ex_Type, NULL));
3715 printf("TO %s\n", TypeToStr(dtype, NULL));
3717 scan = resolveExpCast(isg, sg, scan, dtype, flags);
/*
 * No matching declaration: var-args element.  Allocate a new
 * ARGS storage declaration in the (duplicated) SemGroup and lay
 * it out: align, assign d_Offset, and grow sg2's size/alignment.
 */
3721 Scope tscope = INIT_SCOPE(0);
3723 if ((scan->ex_Flags & EXF_RESOLVED) == 0) {
3724 scan = ResolveExp(isg, sg, scan, NULL,
3725 flags & ~RESOLVE_AUTOCAST);
3727 dassert(varargs != 0);
3728 d = AllocDeclaration(sg2, DOP_ARGS_STORAGE, &tscope);
3729 d->d_StorDecl.ed_Type = DEL_LVALUE(scan->ex_Type);
3731 d->d_Bytes = scan->ex_Type->ty_Bytes;
3732 d->d_AlignMask = scan->ex_Type->ty_AlignMask;
3735 * __align(%d) scope qualifier, override the type's alignment
3737 if ((d->d_Scope.s_Flags & SCOPE_ALIGN) &&
3738 d->d_Scope.s_AlignOverride) {
3739 d->d_AlignMask = d->d_Scope.s_AlignOverride - 1;
3743 sg2->sg_Bytes = BASEALIGN(sg2->sg_Bytes,
3746 d->d_Offset = sg2->sg_Bytes;
3747 d->d_Storage = GENSTAT_MEMDEF;
3749 sg2->sg_Bytes += d->d_Bytes;
3750 if (sg2->sg_AlignMask < d->d_AlignMask)
3751 sg2->sg_AlignMask = d->d_AlignMask;
3756 * Relink and check if constant
3758 scan->ex_Next = *pscan;
3760 if ((scan->ex_Flags & (EXF_CONST | EXF_PROBCONST)) == 0)
3762 stype = scan->ex_Type;
3765 * If the declaration requires an LVALUE, assert that we have an
3766 * lvalue. Otherwise set the direct-store request (also see
3767 * InterpCompoundExp).
3769 if (d->d_ScopeFlags & SCOPE_LVALUE) {
3770 if ((stype->ty_SQFlags & SF_LVALUE) == 0)
3771 fprintf(stderr, "argument must be an lvalue\n");
3772 dassert_exp(scan, stype->ty_SQFlags & SF_LVALUE);
3777 * Check content locking state against scan. Only matters when
3778 * passing a reference as an lvalue since only references can be
3781 * We don't have to worry if we are passing a pointer as an rvalue
3782 * since the code generator will fixup the locking in that case.
3784 if ((d->d_ScopeFlags & SCOPE_LVALUE) && stype->ty_Op == TY_REFTO) {
3788 scope1 = d->d_ScopeFlags & SCOPE_LOCKING_MASK;
3789 if (d->d_Id == RUNEID_THIS) {
3790 /* XXX temporarily ignore e.g. ptr.new() */
3792 } else if (scan->ex_Decl) {
3793 scope2 = scan->ex_Decl->d_ScopeFlags & SCOPE_LOCKING_MASK;
3796 * Var-args or unspecified, allow the default or explicitly
3799 scope2 = scope1 & SCOPE_UNLOCKED;
3801 if (scope1 != scope2) {
3802 fprintf(stderr, "scopes: %08x, %08x\n",
3804 if (d->d_Id == RUNEID_THIS) {
3805 ExpFatalError(scan, TOK_ERR_SCOPE_MISMATCH_THIS);
3807 ExpFatalError(scan, TOK_ERR_SCOPE_MISMATCH);
/* advance both cursors: next declaration, next list element */
3816 d = RUNE_NEXT(d, d_Node);
3817 pscan = &scan->ex_Next;
3821 * Make sure the caller knows its a var-args function even if we didn't
3822 * supply any additional args. Otherwise the backend may not generate
3823 * the correct form for calls to the target.
3826 (sg2->sg_Flags & SGF_VARARGS)) {
3827 sg2 = DupSemGroup(sg2->sg_Parent, NULL, sg2, 1);
3832 * Resolve the varargs sg2 after building it.
3835 ResolveSemGroup(sg2, 0);
3839 * If we made a var-args call, adjust the expression's type
3842 dassert(type->ty_Op == TY_ARGS);
3843 exp->ex_Type = ResolveType(TypeToVarType(type, sg2), NULL, 0);
/* all elements were (probably) constant -> compound may be constant */
3846 exp->ex_Flags |= EXF_PROBCONST;
3848 exp->ex_Flags |= EXF_RESOLVED;
3853 * resolveBracketedExp() - resolve a bracketed expression.
3855 * Resolve a bracketed expression. Bracketed expressions require an array
3856 * type to normalize against.
3858 * The bracketed expressions may contain subclasses of the superclasses
3859 * expected by itype.
/*
 * Simpler sibling of resolveCompoundExp(): every element normalizes
 * against the single array element type rather than a declaration list,
 * and there is no var-args extension.
 *
 * NOTE(review): partial extraction; some original lines (braces, blank
 * lines) are not visible in this view.
 */
3862 resolveBracketedExp(SemGroup *isg, SemGroup *sg, Exp *exp,
3863 Type *itype, int flags)
3871 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
3874 * Expression dup()ing
3876 if (exp->ex_Flags & EXF_DUPEXP) {
3879 fprintf(stderr, "DUPEXPC %d\n", ++count);
3881 exp = DupExp(sg, exp);
3885 * Expression type is the hinted type.
3887 if (itype && (exp->ex_Flags & EXF_REQ_ARRAY) == 0)
3888 exp->ex_Type = itype;
3891 * We need a type to normalize against.
3893 if (exp->ex_Type == NULL) {
3894 dassert_exp(exp, 0);
3899 * Normalize the bracketed expression based on the array type. We have
3900 * to resolve the type before we start the scan in order to ensure that
3901 * d_Offset is properly assigned.
3903 type = ResolveType(exp->ex_Type, NULL, 0);
/* bracketed expressions only normalize against arrays */
3904 if (type->ty_Op != TY_ARYOF) {
3905 dassert_exp(exp, 0);
3908 type = type->ty_AryType.et_Type; /* element type */
3911 * Scan the bracketed expression and match each element against the
3914 pscan = &exp->ex_Lhs;
3915 while ((scan = *pscan) != NULL) {
3920 * Unlink the expression from the compound list temporarily so we can
3921 * safely resolve it. Either cast the expression to the compound
3922 * element, or create a compound element (e.g. varargs call) to match
3925 * Due to the resolver moving things around, the elements of a
3926 * compound expression are sometimes resolved multiple times.
3928 *pscan = scan->ex_Next;
3929 scan->ex_Next = NULL;
/*
 * Do not auto-cast into a generic lvalue Pointer/Reference element;
 * the element's own type must be preserved there.
 */
3935 if ((SimilarType(dtype, &PointerType) ||
3936 SimilarType(dtype, &ReferenceType)) &&
3937 (dtype->ty_SQFlags & SF_LVALUE) == SF_LVALUE)
3940 sflags = flags & ~RESOLVE_AUTOCAST;
3942 sflags = flags | RESOLVE_AUTOCAST;
3946 * LValueStor needs a RS, set ADDRUSED to make sure its available to
3949 if (dtype->ty_SQFlags & SF_LVALUE)
3950 scan->ex_Flags |= EXF_ADDRUSED;
3952 if ((scan->ex_Flags & EXF_RESOLVED) == 0) {
3953 scan = ResolveExp(isg, sg, scan, dtype, sflags);
3956 * Since we have already resolved the expression we need to do
3957 * the same sanity checking that it would do to cast.
3960 (dtype->ty_SQFlags & SF_LVALUE) == 0 ||
3961 (scan->ex_Type->ty_SQFlags & SF_LVALUE));
3962 if (!SimilarType(dtype, scan->ex_Type)) {
3963 scan = resolveExpCast(isg, sg, scan,
3969 * Relink and check if constant
3971 scan->ex_Next = *pscan;
3973 if ((scan->ex_Flags & (EXF_CONST | EXF_PROBCONST)) == 0)
3975 stype = scan->ex_Type;
3978 * If the declaration requires an LVALUE, assert that we have an
3979 * lvalue. Otherwise set the direct-store request (also see
3980 * InterpCompoundExp).
3982 if (dtype->ty_SQFlags & SF_LVALUE) {
3983 if ((stype->ty_SQFlags & SF_LVALUE) == 0)
3984 fprintf(stderr, "argument must be an lvalue\n");
3985 dassert_exp(scan, stype->ty_SQFlags & SF_LVALUE);
3990 * XXX not applicable?
3992 * Check content locking state against scan. Only matters when
3993 * passing a pointer as an lvalue since only pointers can be
3996 * We don't have to worry if we are passing a pointer as an rvalue
3997 * since the code generator will fixup the locking in that case.
3999 if ((dtype->ty_SQFlags & SF_LVALUE) && stype->ty_Op == TY_REFTO) {
4004 scope1 = d->d_ScopeFlags & (SCOPE_UNTRACKED |
4008 scope1 = SCOPE_UNLOCKED;
4010 if (scope1 != scope2) {
4011 fprintf(stderr, "scopes: %08x, %08x\n",
4013 ExpFatalError(scan, TOK_ERR_SCOPE_MISMATCH);
4017 pscan = &scan->ex_Next;
/* all elements were (probably) constant -> bracketed exp may be constant */
4021 exp->ex_Flags |= EXF_PROBCONST;
4022 exp->ex_Flags |= EXF_RESOLVED;
4028 * resolveExpCast() - Cast the expression to the specified type and return
4029 * the cast expression.
4031 * Note that expression nodes depend on their ex_Type being correct, and also
4032 * expressions may be shared, so be careful not to modify the ex_Type (or
4033 * anything else) in the existing expression.
4035 * This code is somewhat different then resolveExpOper() and friends. The Exp
4036 * argument has already been resolved so do not resolve it again, and the
4037 * cast type already has SF_LVALUE set or cleared as appropriate (had better
4040 * As with operators we have to locate the cast declaration matching the cast
4044 resolveExpCast(SemGroup *isg, SemGroup *sg, Exp *exp, Type *ltype, int flags)
4051 flags &= ~RESOLVE_AUTOCAST;
4054 rtype = exp->ex_Type;
4055 dassert(rtype && ltype);
4057 * XXX attempt to cast from subclass to superclass?
4061 * XXX look in our local semantic hierarchy for a compatible cast ?
4063 dassert(ltype->ty_Op != TY_UNRESOLVED);
4064 dassert(rtype->ty_Op != TY_UNRESOLVED);
4067 * Look in the right hand (source) type for the cast
4069 d = findCast(rtype, ltype, rtype, flags);
4072 * If that fails then look in the left hand (destination) type for the
4076 d = findCast(ltype, ltype, rtype, flags);
4080 * Look for pointer or reference type casts
4082 if (d == NULL && rtype->ty_Op == TY_PTRTO) {
4083 d = findCast(&PointerType, ltype, rtype, flags);
4085 if (d == NULL && rtype->ty_Op == TY_REFTO) {
4086 d = findCast(&ReferenceType, ltype, rtype, flags);
4091 * We could not find a specific cast operator. There are some
4092 * inherent casts that we can do. We run through these in attempt to
4093 * come up with matching types.
4095 if (ltype->ty_Op != rtype->ty_Op &&
4096 (ltype->ty_Op == TY_PTRTO || ltype->ty_Op == TY_ARYOF) &&
4097 (rtype->ty_Op == TY_PTRTO || rtype->ty_Op == TY_ARYOF))
4100 * Pointers or arrays can be cast to pointers of the same
4103 * Cast the right hand type to an equivalent * pointer/array
4104 * of the right hand type and re-resolve the cast.
4106 exp = ExpToCastExp(exp,
4107 ResolveType(ChangeType(rtype, ltype->ty_Op), NULL, 0));
4108 return (resolveExpCast(isg, sg, exp, ltype, flags));
4109 } else if (MatchType(ltype, rtype) <= SG_COMPAT_PART) {
4111 * If the types are compatible (casting rtype->ltype), we can
4114 exp = ExpToCastExp(exp, ltype);
4115 } else if (MatchType(&NumericType, ltype) <= SG_COMPAT_SUBCLASS &&
4116 MatchType(&NumericType, rtype) <= SG_COMPAT_SUBCLASS) {
4118 * Casting from one numeric type to another must be supported by
4119 * the interpreter/compiler.
4121 exp = ExpToCastExp(exp, ltype);
4122 } else if (SimilarType(&VoidType, ltype)) {
4124 * Casting anything to void is allowed (throwing the object
4125 * away). E.g. statement-expressions.
4127 exp = ExpToCastExp(exp, ltype);
4128 } else if (SimilarType(&VoidPtrType, ltype)) {
4130 * Casting a pointer to a (void *) is trivial, but is only
4131 * allowed if the underlying structure does not contain any
4134 * NOTE: Generally only used when a pointer is being cast to an
4135 * integer. Rune does not allow casting back to other pointer
4138 * XXX validate integral # of objects fit in pointer range.
4140 if (rtype->ty_RawPtrType.et_Type->ty_Flags & TF_HASLVREF)
4141 ExpFatalError(exp, TOK_ERR_LIMITED_VOIDP_CAST);
4142 exp = ExpToCastExp(exp, ltype);
4143 } else if (SimilarType(&VoidRefType, ltype)) {
4145 * Casting a pointer to a (void @) is trivial.
4147 * NOTE: Generally only used when a pointer is being cast to an
4148 * integer. Rune does not allow casting back to other pointer
4151 * XXX validate integral # of objects fit in pointer range.
4153 if (rtype->ty_RawPtrType.et_Type->ty_Flags & TF_HASLVREF)
4154 ExpFatalError(exp, TOK_ERR_LIMITED_VOIDP_CAST);
4155 exp = ExpToCastExp(exp, ltype);
4156 } else if (SimilarType(rtype, &VoidPtrType)) {
4158 * Casting from a void pointer may not be trivial but we leave it
4159 * up to the interpreter/compiler.
4161 * Only allow if the target does not contain any pointers or if
4162 * the right-hand-side is NULL.
4164 * XXX validate integral # of objects fit in pointer range.
4166 switch (ltype->ty_Op) {
4168 if ((exp->ex_Flags & EXF_NULL) == 0 &&
4169 (ltype->ty_RefType.et_Type->ty_Flags & TF_HASLVREF))
4171 ExpFatalError(exp, TOK_ERR_LIMITED_VOIDP_CAST);
4177 exp = ExpToCastExp(exp, ltype);
4178 } else if (SimilarType(rtype, &CVoidPtrType)) {
4179 switch (ltype->ty_Op) {
4181 if ((exp->ex_Flags & EXF_NULL) == 0 &&
4182 (ltype->ty_RawPtrType.et_Type->ty_Flags & TF_HASLVREF)) {
4183 ExpFatalError(exp, TOK_ERR_LIMITED_VOIDP_CAST);
4189 } else if (SimilarType(ltype, &BoolType) &&
4190 (rtype->ty_Op == TY_PTRTO ||
4191 rtype->ty_Op == TY_REFTO))
4194 * Any pointer can be cast to a boolean, which tests against
4197 exp = ExpToCastExp(exp, ltype);
4198 } else if (ltype->ty_Op == rtype->ty_Op &&
4199 (ltype->ty_Op == TY_PTRTO || ltype->ty_Op == TY_ARYOF))
4202 * We allow casts of pointers to similar numeric types if they
4203 * are the same size, though this is really rather a hack. This
4204 * is mainly to handle the signed<->unsigned cast case. XXX
4208 switch (ltype->ty_Op) {
4210 if ((ltype->ty_RawPtrType.et_Type->ty_SQFlags &
4212 (rtype->ty_RawPtrType.et_Type->ty_SQFlags &
4214 ExpFatalError(exp, TOK_ERR_READONLY);
4216 if (MatchType(&NumericType, ltype->ty_RawPtrType.et_Type) <=
4217 SG_COMPAT_SUBCLASS &&
4218 MatchType(&NumericType, rtype->ty_RawPtrType.et_Type) <=
4219 SG_COMPAT_SUBCLASS &&
4220 ltype->ty_Bytes == rtype->ty_Bytes)
4222 exp = ExpToCastExp(exp, ltype);
4227 if ((ltype->ty_AryType.et_Type->ty_SQFlags & SF_CONST) == 0 &&
4228 (rtype->ty_AryType.et_Type->ty_SQFlags & SF_CONST) != 0) {
4229 ExpFatalError(exp, TOK_ERR_READONLY);
4231 if (MatchType(&NumericType, ltype->ty_AryType.et_Type) <=
4232 SG_COMPAT_SUBCLASS &&
4233 MatchType(&NumericType, rtype->ty_AryType.et_Type) <=
4234 SG_COMPAT_SUBCLASS &&
4235 ltype->ty_Bytes == rtype->ty_Bytes)
4237 exp = ExpToCastExp(exp, ltype);
4244 "Unable to resolve cast from pointers "
4245 "to dissimilar numeric types "
4247 TypeToStr(rtype, NULL),
4248 TypeToStr(ltype, NULL));
4249 dassert_exp(exp, 0);
4251 } else if (didagain == 0 &&
4252 (oflags & RESOLVE_AUTOCAST) &&
4253 (exp->ex_Flags2 & EX2F_WASCOMP) &&
4254 ltype->ty_Op == TY_COMPOUND &&
4255 rtype->ty_Op != TY_COMPOUND) {
4257 * The expression parser might have optimized-out the
4258 * TOK_COMPOUND wrapper around single-element parenthesized
4259 * expressions. Add it back in if the cast target expects a
4260 * compound expression.
4262 * XXX Currently hack a SetDupExp() to avoid re-resolving the
4263 * already-resolved component.
4265 exp = ExpToCompoundExp(exp, TOK_COMPOUND);
4266 exp = resolveCompoundExp(isg, sg, exp, ltype, flags);
4269 } else if (didagain == 0 &&
4270 (oflags & RESOLVE_AUTOCAST) &&
4271 (exp->ex_Flags2 & EX2F_WASCOMP) &&
4272 ltype->ty_Op == TY_CLASS &&
4273 rtype->ty_Op == TY_CLASS &&
4274 ltype != &VoidType &&
4275 (ltype->ty_Flags & (TF_ISBOOL | TF_ISINTEGER |
4276 TF_ISFLOATING)) == 0 &&
4277 (rtype->ty_Flags & (TF_ISBOOL | TF_ISINTEGER |
4280 * The expression parser might have optimized-out the
4281 * TOK_COMPOUND wrapper around single-element parenthesized
4282 * expressions used in a class iterator (in an assignment). Add
4283 * it back in if the ltype is a non-core class and rtype is a
4286 * XXX Currently hack a SetDupExp() to avoid re-resolving the
4287 * already-resolved component.
4289 exp = ExpToCompoundExp(exp, TOK_COMPOUND);
4290 exp = resolveCompoundExp(isg, sg, exp, ltype, flags);
4295 "Unable to resolve cast from %s to %s\n",
4296 TypeToStr(rtype, NULL),
4297 TypeToStr(ltype, NULL));
4298 dassert_exp(exp, 0);
4300 } else if (d->d_ScopeFlags & SCOPE_INTERNAL) {
4302 * We found a cast operator and it is an internal operator
4304 exp = ExpToCastExp(exp, ltype);
4308 * We found a cast operator and it is a Rune cast procedure. We must
4309 * convert the cast to a procedure call. If we want
4310 * resolveCompoundExp() to be able to generate a compatible procedure
4311 * (in a subclass) we have to tell it about the procedure.
4315 sexp = ExpToCompoundExp(exp, TOK_COMPOUND);
4316 if (d->d_ProcDecl.ed_ProcBody == NULL) /* XXX */
4318 sexp = resolveCompoundExp(isg, sg, sexp,
4319 d->d_ProcDecl.ed_Type->ty_ProcType.et_ArgsType,
4321 exp = AllocExp(NULL);
4322 exp->ex_Lhs = AllocExp(NULL);
4323 exp->ex_Lhs->ex_Token = TOK_DECL;
4324 exp->ex_Lhs->ex_Id = d->d_Id;
4325 exp->ex_Lhs->ex_Decl = d;
4326 exp->ex_Lhs->ex_Type = d->d_ProcDecl.ed_Type;
4327 exp->ex_Lhs->ex_Flags |= EXF_RESOLVED;
4329 exp->ex_Flags |= EXF_BINARY;
4330 exp->ex_Token = TOK_CALL;
4331 /* XXX use ltype or procedure's rettype? */
4332 exp->ex_Type = ltype;
4333 LexDupRef(&sexp->ex_LexRef, &exp->ex_LexRef);
4334 LexDupRef(&sexp->ex_LexRef, &exp->ex_Lhs->ex_LexRef);
4339 * Additional work to inline the procedure
4341 resolveDynamicProcedure(isg, sg, exp, flags);
4342 resolveProcedureInline(isg, sg, exp, flags);
4344 exp->ex_Flags |= EXF_RESOLVED;
/*
 * findCast() - search btype's SemGroup for a user-defined cast
 *		declaration converting rtype to ltype.
 *
 * Scans the declaration list for DOP_PROC entries scoped SCOPE_CAST and
 * matches candidates via MatchCastTypes().  If btype is a TY_COMPOUND,
 * the search recurses into the type of each storage/typedef element, so
 * e.g. (mycustomtype, double) finds a cast declared in mycustomtype.
 *
 * Returns the matching Declaration or NULL if no cast was found.
 *
 * NOTE(review): this listing is a lossy extraction (blank/brace lines
 * and some statements are missing); all code lines below are preserved
 * byte-identical, only comments have been added.
 */
4350 findCast(Type *btype, Type *ltype, Type *rtype, int flags)
/* Auto-cast resolution does not propagate into cast lookup. */
4355 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
4357 dassert(rtype->ty_Op != TY_UNRESOLVED);
4358 dassert(ltype->ty_Op != TY_UNRESOLVED);
4361 * Locate the base type. If the base type does not have a SemGroup there
4362 * are no casts. (XXX put system operators here)
4364 sg = BaseType(&btype);
4365 dassert(btype->ty_Op != TY_UNRESOLVED);
4371 * Look for the cast in the SemGroup
4373 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
4374 if (d->d_Op == DOP_PROC && (d->d_ScopeFlags & SCOPE_CAST)) {
/* Candidate cast procedure: resolve its type before matching. */
4375 ResolveType(d->d_ProcDecl.ed_Type, NULL, 0);
4376 if (MatchCastTypes(d, ltype, rtype))
4382 * Failed. If the base type is a compound type, look for the cast in the
4383 * SemGroup for each element making up the compound type. e.g. so
4384 * (mycustomtype, double) would find the cast in mycustomtype.
4386 if (btype->ty_Op == TY_COMPOUND) {
4387 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
4389 if (d->d_Op & DOPF_STORAGE) {
4390 ResolveType(d->d_StorDecl.ed_Type, NULL, 0);
4391 d2 = findCast(d->d_StorDecl.ed_Type,
4392 ltype, rtype, flags);
4393 } else if (d->d_Op == DOP_TYPEDEF) {
/*
 * NOTE(review): this branch resolves d_StorDecl.ed_Type but recurses on
 * d_TypedefDecl.ed_Type -- presumably the two alias in a union; confirm
 * against the Declaration definition in the full source.
 */
4394 ResolveType(d->d_StorDecl.ed_Type, NULL, 0);
4395 d2 = findCast(d->d_TypedefDecl.ed_Type,
4396 ltype, rtype, flags);
4409 * resolveExpOper() - resolve an operator
4411 * This is complex enough that it is broken out into its own procedure.
4412 * Normally we just look the operator up but we have to special case pointer
4413 * arithmetic because we will not know until now that we have to do it.
4415 * itype is a return-type hint only. resolveExpOper() can ignore it if it
4416 * wishes. We currently use it to detect cast-to-void, such as when an
4417 * expression like "++i" is used in a for() loop or as a standalone
4418 * statement. This allows us to optimize the case.
/*
 * resolveExpOper() - resolve a unary or binary operator expression.
 *
 * Resolves the operand sub-expressions, then locates the operator
 * declaration: a pointer (TY_PTRTO) or reference (TY_REFTO) left-hand
 * side is first looked up in the Pointer/Reference class via findOper(),
 * falling back to findExpOper(); otherwise findExpOper() is used
 * directly.  Internal operators (SCOPE_INTERNAL) keep the expression
 * as-is, with a cast-to-void optimization (EXF_RET_VOID) when the
 * caller hints itype == &VoidType; procedural operators are rewritten
 * into a TOK_CALL of the operator procedure.  Pure operators whose
 * arguments are constants are flagged EXF_PROBCONST.
 *
 * isg/sg - import/current semantic groups used for sub-resolution.
 * itype  - return-type hint only; may be ignored.
 */
4421 resolveExpOper(SemGroup *isg, SemGroup *sg, Exp *exp, Type *itype, int flags)
4424 int isPointerOp = 0;
4425 int isReferenceOp = 0;
4427 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
4429 dassert_exp(exp, exp->ex_Id != 0);
/* Resolve the operand sub-expressions first. */
4430 if (exFlags & EXF_BINARY) {
4431 exLhs = ResolveExp(isg, sg, exLhs, NULL, flags);
4432 exRhs = ResolveExp(isg, sg, exRhs, NULL, flags);
4433 } else if (exFlags & EXF_UNARY) {
4434 exLhs = ResolveExp(isg, sg, exLhs, NULL, flags);
4436 dassert_exp(exp, 0);
4440 * If the lhs is a pointer look the operator up in the Pointer class
4441 * first. Operators in the Pointer class are special-cased. A second
4442 * pointer argument or a pointer return value must match the lhs pointer.
4444 * If this fails, or if the ltype is not a pointer, then look the
4445 * operator up normally.
4447 if (exLhs->ex_Type->ty_Op == TY_PTRTO) {
4451 if (exFlags & EXF_BINARY) {
4452 rtype = exRhs->ex_Type;
4453 ltype = exLhs->ex_Type;
4455 dassert(exFlags & EXF_UNARY);
4457 ltype = exLhs->ex_Type;
4459 d = findOper(&PointerType, exp->ex_Id, ltype, rtype, flags);
4463 d = findExpOper(exp, flags);
4464 } else if (exLhs->ex_Type->ty_Op == TY_REFTO) {
4468 if (exFlags & EXF_BINARY) {
4469 rtype = exRhs->ex_Type;
4470 ltype = exLhs->ex_Type;
4472 dassert(exFlags & EXF_UNARY);
4474 ltype = exLhs->ex_Type;
4476 d = findOper(&ReferenceType, exp->ex_Id, ltype, rtype, flags);
4480 d = findExpOper(exp, flags);
4482 d = findExpOper(exp, flags);
4486 * Fall through to finish up resolving the operator. We just set ex_Decl
4487 * for internal operators, and construct a call for non-internal
4488 * procedural operators.
4496 dassert_exp(exp, d != NULL);
4497 dassert_exp(exp, d->d_Op == DOP_PROC);
4498 dassert_exp(exp, d->d_ProcDecl.ed_Type->ty_Op == TY_PROC);
4499 type = d->d_ProcDecl.ed_Type;
4500 exType = type->ty_ProcType.et_RetType;
4503 * Special case for internal Pointer ops. The return type is the
4504 * left-hand type (we may still optimize it to void later).
4506 if (isReferenceOp &&
4507 (d->d_ScopeFlags & SCOPE_INTERNAL) &&
4508 SimilarType(&VoidRefType, exType))
4510 if (exType->ty_SQFlags & SF_LVALUE)
4511 exType = ADD_LVALUE(exLhs->ex_Type);
4513 exType = DEL_LVALUE(exLhs->ex_Type);
4517 (d->d_ScopeFlags & SCOPE_INTERNAL) &&
4518 SimilarType(&VoidPtrType, exType))
4520 if (exType->ty_SQFlags & SF_LVALUE)
4521 exType = ADD_LVALUE(exLhs->ex_Type);
4523 exType = DEL_LVALUE(exLhs->ex_Type);
4526 type = d->d_ProcDecl.ed_Type->ty_ProcType.et_ArgsType;
4527 dassert(type->ty_Op == TY_ARGS);
4528 sg2 = type->ty_ArgsType.et_SemGroup;
4531 * Assert that LVALUE requirements are met. XXX MatchType() code
4532 * should disallow the non-lvalue-cast-to-lvalue case so we don't
4533 * have to do a check here.
4535 RUNE_FOREACH(d2, &sg2->sg_DeclList, d_Node) {
4536 if ((d2->d_Op & DOPF_STORAGE) &&
4537 d2->d_Op != DOP_GLOBAL_STORAGE) {
4539 if ((d2->d_ScopeFlags & SCOPE_LVALUE) &&
4540 (exLhs->ex_Type->ty_SQFlags &
4544 "lhs of exp must be "
4546 dassert_exp(exp, 0);
4548 } else if (count == 1) {
4549 if ((d2->d_ScopeFlags & SCOPE_LVALUE) &&
4550 (exRhs->ex_Type->ty_SQFlags &
4554 "rhs of exp must be "
4556 dassert_exp(exp, 0);
/* Internal operators stay in-place; procedural operators become calls. */
4563 if (d->d_ScopeFlags & SCOPE_INTERNAL) {
4565 * Internal operator. Optimize any cast to void by having the
4566 * internal function deal with it. (since we aren't setting
4567 * exType the optimization currently doesn't do anything, see
4571 if (itype == &VoidType) {
4572 /* exType = itype; */
4573 exFlags |= EXF_RET_VOID;
4577 * Normal procedural operator. Convert the left and right hand
4578 * sides to a compound expression and convert exp to a TOK_CALL.
4579 * NOTE! ex_Rhs may be NULL (unary op).
4581 * The compound expression may need to rewrite a subclass
4582 * procedure, which it can do if the procedure's body has not yet
4583 * been created (or duplicated from the superclass). ex_Decl
4584 * must be set in this case.
4586 * Note that the expression structure may be shared. The
4587 * conversion is permanent so that is ok.
4589 * XXX keep the type intact?
4591 exLhs->ex_Next = exRhs;
4593 exRhs = ExpToCompoundExp(exRhs, TOK_COMPOUND);
4594 if (d->d_ProcDecl.ed_ProcBody == NULL)
4596 exRhs = resolveCompoundExp(isg, sg, exRhs, type, flags);
4597 exLhs = AllocExp(NULL);
4598 LexDupRef(&exp->ex_LexRef, &exLhs->ex_LexRef);
4599 exLhs->ex_Token = TOK_ID;
4600 exLhs->ex_Id = d->d_Id;
4602 exLhs->ex_Type = d->d_ProcDecl.ed_Type;
4603 exLhs->ex_Flags |= EXF_RESOLVED;
4604 exp->ex_Token = TOK_CALL;
4605 exFlags = EXF_BINARY;
4610 * Additional work to inline the procedure
4612 resolveDynamicProcedure(isg, sg, exp, flags);
4613 resolveProcedureInline(isg, sg, exp, flags);
/* No matching operator declaration was found: report and assert. */
4617 char buf[RUNE_IDTOSTR_LEN];
4618 fprintf(stderr, "Unable to resolve operator: %s\n",
4619 runeid_text(exp->ex_Id, buf));
4620 dassert_exp(exp, 0);
4624 * Flag a pure operator whose arguments are constants as probably being
4627 if (d->d_ScopeFlags & SCOPE_PURE) {
4628 if ((exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) &&
4630 (exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)))) {
4631 exFlags |= EXF_PROBCONST;
4635 exp->ex_Flags |= EXF_RESOLVED;
4641 * Helper, visibility must be properly set immediately, prior to any
4642 * circularity, to guarantee that search functions work without deferral.
/*
 * resvis_set() - report a resolved visibility value back through the
 * resvis chain.
 *
 * Per the note above, visibility must be set immediately -- prior to
 * any circular/deferred resolution -- so search functions work without
 * deferral.
 *
 * NOTE(review): this extracted listing shows only the single store; the
 * full source presumably iterates the whole chain -- confirm.
 */
4646 resvis_set(resvis_t *vis, int visibility)
/* Publish the visibility through the chain element's visp pointer. */
4649 *vis->visp = visibility;
4655 * ResolveType() - Resolve a type (always returns its argument)
4657 * Resolve a type. Always returns consistent visibility information to the
4658 * caller, even if the resolution remains in-progress. Thus all
4659 * modifications to the resvis chain occurs on the front-end of any
4662 * Flags, Size and Alignment information might take several passes for
4663 * classes (due to chains of DF_DYNAMICREF'd processes), or arrays (due to
4664 * the array size not being immediately resolvable).
/*
 * ResolveType() - resolve a type node (see the block comment above).
 *
 * Dispatches on ty_Op to compute ty_Bytes, ty_AlignMask, ty_Visibility
 * and inherited flags (TF_HASLVREF, TF_HASASS, constructor/destructor
 * hints), guarding against circular resolution with TF_RESOLVING /
 * TF_RESOLVED.  Visibility is always reported to the caller through the
 * resvis chain via resvis_set(), even while resolution is in progress.
 * Always returns its 'type' argument (per the comment above).
 */
4667 ResolveType(Type *type, resvis_t *vis, int retry)
4669 SemGroup *sg = NULL;
4675 myvis.visp = &dummy_vis;
4678 * Detect circular loop.
4680 if (type->ty_Flags & TF_RESOLVED) {
4681 resvis_set(vis, type->ty_Visibility);
4684 if (type->ty_Flags & TF_RESOLVING) {
4686 resvis_set(vis, type->ty_Visibility);
4690 type->ty_Flags |= TF_RESOLVING;
4693 * Remember that visibility data must be set at the head of any recursion
/* Dispatch on the type operator. */
4698 switch (type->ty_Op) {
4701 * NOTE: Special case, PointerType and ReferenceType fields not in
4702 * classes XXX (force alignment and bytes)?
4704 dassert(type->ty_SQList ==
4705 &type->ty_ClassType.et_SemGroup->sg_ClassList);
4707 /* visibility already determined by resolveSuperClass? */
4708 dassert(type->ty_Visibility != 0);
4709 resvis_set(vis, type->ty_Visibility);
4712 * The superclass (if any) cannot depend on our subclass, so resolve
4713 * it first. Note that resolveSuperClass() does not do everything
4714 * because it has to be called in the ResolveClasses() stage, so
4715 * finish it up here with a real resolve.
4717 if (type->ty_ClassType.et_Super) {
4718 Type **superp = &type->ty_ClassType.et_Super;
4719 if ((*superp)->ty_Op == TY_UNRESOLVED)
4720 resolveSuperClass(*superp);
4721 ResolveType(*superp, NULL, 0);
4725 * DEPENDENCY - SG must resolve for us to resolve. (if we can't
4726 * resolve this it is likely an embedded object loop).
4728 sg = type->ty_ClassType.et_SemGroup;
4729 ResolveSemGroup(sg, 0);
4730 if (sg->sg_Flags & SGF_RESOLVED) {
4731 if (type != &PointerType && type != &ReferenceType) {
4732 type->ty_Bytes = sg->sg_Bytes;
4733 type->ty_AlignMask = sg->sg_AlignMask;
4740 * Fixup type ty_SQFlags here XXX removed Any hard class type must be
4741 * given the SF_HARD storage qualifier.
4743 if (sg->sg_Stmt->u.ClassStmt.es_Decl->d_ScopeFlags & SCOPE_HARD)
4744 type->ty_SQFlags |= SF_HARD;
4749 * NOTE: Do not set TF_HASLVREF, C pointers are not tracked.
4751 * Always complete, even if the target type is incomplete. (allow
4752 * circular references).
4754 type->ty_Bytes = sizeof(void *);
4755 type->ty_AlignMask = RAWPTR_ALIGN;
4756 myvis.visp = &type->ty_Visibility;
4757 ResolveType(type->ty_RawPtrType.et_Type, &myvis, 0);
4762 * Set TF_HASLVREF, references are tracked.
4764 * Always complete, even if the target type is incomplete. (allow
4765 * circular references).
4767 type->ty_Bytes = sizeof(ReferenceStor);
4768 type->ty_AlignMask = REFERENCESTOR_ALIGNMASK;
4769 type->ty_Flags |= TF_HASLVREF;
4770 myvis.visp = &type->ty_Visibility;
4771 ResolveType(type->ty_RefType.et_Type, &myvis, 0);
4776 * Inherit TF_HASLVREF (if array type is or contains something which
4777 * needs to be tracked).
4779 * The array size must resolve sufficiently for us to resolve.
4785 if (type->ty_AryType.et_OrigArySize) {
4786 type->ty_AryType.et_ArySize =
4787 DupExp(NULL, type->ty_AryType.et_OrigArySize);
4789 exp = type->ty_AryType.et_ArySize;
4790 atype = type->ty_AryType.et_Type;
4792 myvis.visp = &type->ty_Visibility;
4793 ResolveType(atype, &myvis, 0);
/* The element count must be a resolvable constant expression. */
4794 exp = resolveConstExp(NULL, type->ty_AryType.et_SemGroup, exp, 0);
4796 if ((exp->ex_Flags & EXF_RESOLVED) &&
4797 (atype->ty_Flags & TF_RESOLVED))
4799 type->ty_AryType.et_ArySize = exp;
4800 type->ty_AryType.et_Count = resolveGetConstExpInt64(exp);
4801 type->ty_AlignMask = type->ty_AryType.et_Type->ty_AlignMask;
4802 type->ty_Bytes = type->ty_AryType.et_Type->ty_Bytes *
4803 type->ty_AryType.et_Count;
4804 type->ty_Flags |= type->ty_AryType.et_Type->ty_Flags &
4805 (TF_HASLVREF | TF_HASCONSTRUCT |
4806 TF_HASDESTRUCT | TF_HASGCONSTRUCT |
4807 TF_HASGDESTRUCT | TF_HASASS);
4814 * All elements of a compound type must resolve for the compound type
4817 * NOTE: TF_HASLVREF inherited as appropriate after switch.
4819 sg = type->ty_CompType.et_SemGroup;
4820 ResolveSemGroup(sg, 0);
4821 if (sg->sg_Flags & SGF_RESOLVED) {
4822 type->ty_Bytes = sg->sg_Bytes;
4823 type->ty_AlignMask = sg->sg_AlignMask;
4824 type->ty_Visibility = SCOPE_ALL_VISIBLE;
4830 * All elements of a compound type must resolve for the compound type
4833 * NOTE: TF_HASLVREF inherited as appropriate after switch.
4835 sg = type->ty_VarType.et_SemGroup;
4836 ResolveSemGroup(sg, 0);
4837 if (sg->sg_Flags & SGF_RESOLVED) {
4838 type->ty_Bytes = sg->sg_Bytes;
4839 type->ty_AlignMask = sg->sg_AlignMask;
4840 type->ty_Visibility = SCOPE_ALL_VISIBLE;
4846 * All elements of a compound type must resolve for the compound type
4849 * NOTE: TF_HASLVREF inherited as appropriate after switch.
4851 sg = type->ty_ArgsType.et_SemGroup;
4852 ResolveSemGroup(sg, 0);
4853 if (sg->sg_Flags & SGF_RESOLVED) {
4854 type->ty_Bytes = sg->sg_Bytes;
4855 type->ty_AlignMask = sg->sg_AlignMask;
4856 type->ty_Visibility = SCOPE_ALL_VISIBLE;
4862 * We mark the type as resolved regardless of the state of the
4863 * underlying argument and return types.
4865 * NOTE: Storage not tracked.
4868 type->ty_AlignMask = 0;
4869 type->ty_Visibility = SCOPE_ALL_VISIBLE;
4870 resvis_set(vis, type->ty_Visibility);
4871 ResolveType(type->ty_ProcType.et_ArgsType, NULL, 0);
4872 ResolveType(type->ty_ProcType.et_RetType, NULL, 0);
4877 * Raw storage must always resolve.
4879 * NOTE: Base storage is not tracked.
4881 type->ty_Bytes = type->ty_StorType.et_Bytes;
4882 /* XXX check pwr of 2 */
4884 type->ty_AlignMask = type->ty_Bytes - 1;
4885 type->ty_Visibility = SCOPE_ALL_VISIBLE;
4886 resvis_set(vis, type->ty_Visibility);
4891 * We loop until the type is no longer TY_UNRESOLVED.
4893 * NOTE: resolveSuperClass() is not really a recursive function so we
4894 * don't have to pre-set visibility.
4896 resolveSuperClass(type);
4897 /* visibility set by resolveSuperClass() */
4898 goto loop_unresolved;
4902 * A Dynamic type is basically unknown at compile-time. Always
4905 * NOTE: Tracking unknown (must be handled at run-time).
4907 type->ty_Visibility = SCOPE_ALL_VISIBLE;
4908 resvis_set(vis, type->ty_Visibility);
4913 * TY_IMPORT types cannot be directly referenced by the program. They
4914 * are implicitly used as a placeholder for a module's global storage
4917 * NOTE: Storage is persistent, so wrapper is not tracked.
4919 type->ty_Visibility = SCOPE_ALL_VISIBLE; /* XXX */
4920 resvis_set(vis, type->ty_Visibility);
4924 dpanic("Unknown type %d (type=%p)", type->ty_Op, type);
/* Resolution completed: clear the in-progress flag, mark resolved. */
4929 type->ty_Flags &= ~TF_RESOLVING;
4930 type->ty_Flags |= TF_RESOLVED;
/*
 * NOTE(review): 'sg' is NULL-initialized and only set for
 * SemGroup-backed types; the sg_Flags rollup below is presumably
 * guarded by an if (sg) test on a line not present in this extracted
 * listing -- confirm against the full source.
 */
4932 if (sg->sg_Flags & SGF_ISINTEGER)
4933 type->ty_Flags |= TF_ISINTEGER;
4934 if (sg->sg_Flags & SGF_ISUNSIGNED)
4935 type->ty_Flags |= TF_ISUNSIGNED;
4936 if (sg->sg_Flags & SGF_ISFLOATING)
4937 type->ty_Flags |= TF_ISFLOATING;
4938 if (sg->sg_Flags & SGF_ISBOOL)
4939 type->ty_Flags |= TF_ISBOOL;
4940 if (sg->sg_Flags & SGF_HASASS)
4941 type->ty_Flags |= TF_HASASS;
4943 type->ty_Flags |= TF_HASLVREF;
4944 /* XXX TF_VARARGS */
4945 if (sg->sg_Flags & SGF_VARARGS)
4946 type->ty_Flags |= TF_HASLVREF;
4948 type->ty_Flags |= TF_HASCONSTRUCT;
4950 type->ty_Flags |= TF_HASDESTRUCT;
4952 * Combine constructor/destructor hint flags for globals because
4953 * we have just one linked list for global constructors and
4954 * destructors (no need to optimize heavily).
4957 type->ty_Flags |= TF_HASGCONSTRUCT | TF_HASGDESTRUCT;
4958 dassert(type->ty_Visibility != 0);
4962 * NOTE: visibility is always set prior to any deferral or
4969 * Resolve the default expression for the type, if any. We do not
4970 * require the expression to complete.
4972 * XXX qualified types just copy the exp. bad bad YYY
4974 * YYY ResolveExp() no ISG (import sem group)
4976 if (type->ty_OrigAssExp) {
4977 type->ty_Flags |= TF_HASASS;
4978 type->ty_AssExp = DupExp(sg, type->ty_OrigAssExp);
4979 type->ty_AssExp = ResolveExp(NULL, sg, type->ty_AssExp,
4985 * ty_DynamicVector is nominally used when a Rune binary is run, but we
4986 * also need to set up enough of it such that mixed interpretation and
4987 * execution, or even just straight interpretation, works. This is
4988 * because the interpreter calls into libruntime.
4990 type->ty_DynamicVector = DefaultDynamicVector;
4994 * XXX messes up later Storage/StorageAlign
4996 * Internal types may be implied during resolution, be sure to completely
4997 * resolve its alignment too.
4999 * (If not internal we have to wait because there might be recursive
5000 * dependencies on the type).
5002 if (type->ty_Flags & TF_ISINTERNAL) {
5003 urunesize_t dummy = 0;
5004 resolveTypeAlign(type, &dummy, 0);
5011 * resolveSuperClass() - resolve an unresolved dotted id sequence into a
5014 * Unresolved type identifier sequences must be resolved. We are also
5015 * responsible for setting the visibility of the type's elements.
/*
 * resolveSuperClass() - resolve an unresolved dotted-id type reference
 * in place (TY_UNRESOLVED -> TY_CLASS or a qualified typedef copy).
 *
 * Looks the dotted id up via FindDeclPath().  A class declaration
 * converts 'super' to TY_CLASS and re-links it onto the target
 * SemGroup's class list; a typedef adjusts 'super' in place via
 * TypeToQualType() so every shared reference through this class path
 * resolves at once.  The computed visibility is recorded in
 * ty_Visibility.  Any other declaration kind is a fatal error.
 */
5018 resolveSuperClass(Type *super)
5023 int visibility = SCOPE_ALL_VISIBLE;
5026 dassert_type(super, super->ty_Op == TY_UNRESOLVED);
5028 dottedId = super->ty_UnresType.et_DottedId;
5029 sg = super->ty_UnresType.et_SemGroup;
/* Search for the dotted identifier path from the import SemGroup. */
5031 d = FindDeclPath(NULL, super->ty_UnresType.et_ImportSemGroup,
5033 dottedId, FDC_NULL, &visibility, -1, &eno);
5035 errorDottedId(dottedId, "Unable to resolve class");
5036 dassert_type(super, 0);
5040 * Resolve the unresolved type. Note that this occurs during class
5041 * resolution and we can't call ResolveType() here without getting into a
5042 * loop, so we do not yet know storage requirements (ty_Bytes and
5047 sg = d->d_ClassDecl.ed_SemGroup;
5048 super->ty_Op = TY_CLASS;
5049 super->ty_ClassType.et_SemGroup = sg;
5050 super->ty_ClassType.et_Super = d->d_ClassDecl.ed_Super;
5051 super->ty_Visibility = visibility;
/* Re-link the type onto the resolved class's SQ list. */
5052 if (super->ty_SQList)
5053 RUNE_REMOVE(super->ty_SQList, super, ty_Node);
5054 super->ty_SQList = &sg->sg_ClassList;
5055 RUNE_INSERT_TAIL(super->ty_SQList, super, ty_Node);
5056 dassert(visibility);
5057 /* can't resolve super here */
5059 * XXX should we move the class from the unresolved list to the new
5060 * SemGroup's actual list?
5065 * Adjust super instead of allocating a new super, so all other
5066 * references to super using this class path get resolved too.
5068 * XXX which AssExp do we use ?
5071 dassert_type(super, d->d_TypedefDecl.ed_Type != super);
5072 TypeToQualType(d->d_TypedefDecl.ed_Type,
5075 d->d_TypedefDecl.ed_Type->ty_SQFlags,
5078 super->ty_Visibility = visibility;
5079 /* can't resolve super here */
/* Neither class nor typedef: fatal. */
5082 errorDottedId(dottedId, "identifier is not a class or typedef");
5083 dassert_type(super, 0);
5088 * Resolve the declarations in a non-stack semantic group. The sg is being
5089 * referenced by someone, who resolves it with this. This may take multiple
5092 * - Resolve all real storage elements, referenced or not, so the structure
5093 * has a consistent size. Size and Alignment becomes valid when primarily
5094 * resolution via SGF_RESOLVED / SGF_GRESOLVED completes.
5096 * - Most procedures are only resolved on-demand and are not resolved here.
5097 * However, access to the SG implies that all constructors and destructors
5098 * must be active, so we resolve those.
5100 * - We must also resolve any DF_DYNAMICREF'd procedures, which are dynamic
5101 * method calls in sub-classes. The flag is set on the method in the
5102 * subclass when a method call is made in any super-class.
5104 * (Any newly added DF_DYNAMICREF'd procedures will be resolved by the code
5105 * setting the flag if it finds that the SG is undergoing resolution or
5106 * already resolved).
5108 * - We supply a dynamic index for all procedures, whether they are
5109 * referenced or not, and leave the index NULL if they are not. This allows
5110 * us to resolve the indices & extent of the dynamic index array even if late
5111 * procedures are added.
5113 * NOTE! This code does not resolve declarations related to executable
5114 * semantic groups, such as sub-blocks within a procedure, but it does have
5115 * to resolve procedure definitions found in Class's and such.
5117 * NOTE! This code handles the last stage of subclass refinement, by checking
5118 * the validity of the refinement and setting sg_Compat properly.
5122 ResolveSemGroup(SemGroup *sg, int retry)
5129 if ((sg->sg_Flags & (SGF_RESOLVED | SGF_GRESOLVED)) ==
5130 (SGF_RESOLVED | SGF_GRESOLVED)) {
5133 if (sg->sg_Flags & (SGF_RESOLVING | SGF_GRESOLVING)) {
5138 if (sg->sg_Flags & SGF_RESOLVED)
5140 sg->sg_Flags |= SGF_RESOLVING;
5145 * index 0 - reserved for dynamic initialization index 1 - reserved for
5146 * dynamic destructor
5151 * SECTION1 - INSTANTIATED OBJECT RESOLUTION & PROCEDURE RESOLUTION
5153 * Handle SCOPE_REFINE and DF_DYNAMICREF flagging. We resolve non-global
5154 * elements with real storage.
5156 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
5158 * DF_DYNAMICREF requires that the declaration be resolved because it
5159 * might be used in a dynamic method call, even if it was not
5160 * directly referenced. So if the SemGroup (i.e. class) is
5161 * referenced at all, so to must the method.
5163 if (d->d_Flags & DF_DYNAMICREF) {
5164 if ((d->d_Flags & (DF_RESOLVED | DF_RESOLVING)) == 0) {
5170 * Process all procedures and any non-global instantiated storage.
5180 * Assign the dynamic index. There may be multiple entries for
5181 * the same d_Id, they are ordered such that refinements use the
5182 * same DynIndex as in the superclass which is what allows
5183 * dynamic method calls to work properly. All non-refined
5184 * subclass elements are ordered after all refined/non=refined
5185 * superclass elements (replacing the superclass element and
5186 * using the same DynIndex when refined).
5188 * We must assign d_DynIndex regardless of whether the procedure
5189 * is used or not to guarantee a consistent index between
5190 * super-class and sub-class.
5192 if ((d->d_ScopeFlags & SCOPE_INTERNAL) == 0 &&
5193 (d->d_ProcDecl.ed_Type->ty_SQFlags &
5194 (SF_METHOD | SF_GMETHOD)))
5196 d->d_DynIndex = dyncount;
5201 * Only process referenced procedures, plus any that were flagged
5202 * (see above), plus any constructors or destructors.
5204 if ((d->d_Flags & (DF_RESOLVED | DF_RESOLVING)) == 0) {
5205 if (d->d_ScopeFlags & (SCOPE_CONSTRUCTOR |
5206 SCOPE_DESTRUCTOR)) {
5210 if ((d->d_Flags & (DF_RESOLVED | DF_RESOLVING)) == 0)
5213 if (d->d_ScopeFlags & SCOPE_GLOBAL) {
5214 if ((d->d_Flags & DF_ONGLIST) == 0 &&
5215 (d->d_ScopeFlags & (SCOPE_CONSTRUCTOR |
5216 SCOPE_DESTRUCTOR))) {
5217 d->d_GNext = d->d_MyGroup->sg_GBase;
5218 d->d_MyGroup->sg_GBase = d;
5219 d->d_Flags |= DF_ONGLIST;
5220 sg->sg_Flags |= SGF_GABICALL;
5223 if ((d->d_Flags & DF_ONCLIST) == 0 &&
5224 (d->d_ScopeFlags & SCOPE_CONSTRUCTOR)) {
5225 d->d_CNext = d->d_MyGroup->sg_CBase;
5226 d->d_MyGroup->sg_CBase = d;
5227 d->d_Flags |= DF_ONCLIST;
5228 sg->sg_Flags |= SGF_ABICALL;
5230 if ((d->d_Flags & DF_ONDLIST) == 0 &&
5231 (d->d_ScopeFlags & SCOPE_DESTRUCTOR)) {
5232 d->d_DNext = d->d_MyGroup->sg_DBase;
5233 d->d_MyGroup->sg_DBase = d;
5234 d->d_Flags |= DF_ONDLIST;
5235 sg->sg_Flags |= SGF_ABICALL;
5239 case DOP_STACK_STORAGE:
5241 * can't happen. Stack storage is only used in executable
5246 case DOP_ARGS_STORAGE:
5247 case DOP_GROUP_STORAGE:
5249 if ((d->d_Flags & DF_RESOLVED) == 0) {
5254 if (ok == 0) /* save some time */
5259 * Update SG size, alignment, set d_Offset and d_Storage within
5262 if (sg->sg_AlignMask < d->d_AlignMask)
5263 sg->sg_AlignMask = d->d_AlignMask;
5264 sg->sg_Bytes = BASEALIGN(sg->sg_Bytes, d->d_AlignMask);
5265 d->d_Offset = sg->sg_Bytes;
5268 * Set d_Storage based on scope and intended default for d_Op.
5270 if (d->d_Op == DOP_ARGS_STORAGE) {
5271 if (d->d_ScopeFlags & SCOPE_UNTRACKED)
5272 d->d_Storage = GENSTAT_NONE;
5273 else if (d->d_ScopeFlags & SCOPE_UNLOCKED)
5274 d->d_Storage = GENSTAT_REFD;
5275 else if (d->d_ScopeFlags & SCOPE_SOFT)
5276 d->d_Storage = GENSTAT_LOCK;
5277 else if (d->d_ScopeFlags & SCOPE_HARD)
5278 d->d_Storage = GENSTAT_LOCKH;
5280 d->d_Storage = GENSTAT_ARGDEF;
5282 d->d_Storage = GENSTAT_MEMDEF;
5284 sg->sg_Bytes += d->d_Bytes;
5286 type = d->d_StorDecl.ed_Type;
5287 if (d->d_StorDecl.ed_OrigAssExp)
5288 sg->sg_Flags |= SGF_HASASS;
5289 if (type->ty_Flags & TF_HASASS)
5290 sg->sg_Flags |= SGF_HASASS;
5291 if (type->ty_Flags & TF_HASLVREF)
5292 sg->sg_Flags |= SGF_HASLVREF;
5293 if (type->ty_Flags & TF_HASCONSTRUCT)
5294 sg->sg_Flags |= SGF_ABICALL;
5295 if (type->ty_Flags & TF_HASDESTRUCT)
5296 sg->sg_Flags |= SGF_ABICALL;
5297 if (type->ty_Flags & TF_HASGCONSTRUCT)
5298 sg->sg_Flags |= SGF_GABICALL;
5299 if (type->ty_Flags & TF_HASGDESTRUCT)
5300 sg->sg_Flags |= SGF_GABICALL;
5302 case DOP_GLOBAL_STORAGE:
5303 /* handled in pass2 */
5306 dassert_semgrp(sg, 0);
5311 * Finish up any refinements. (Effects 'ok'? no for now)
5313 if (d->d_ScopeFlags & SCOPE_REFINE) {
5314 if (d->d_Flags & (DF_RESOLVING | DF_RESOLVED)) {
5315 ResolveDecl(d->d_Super, 0);
5317 RefineDeclaration(sg, d->d_Super, d);
5324 sg->sg_Bytes = BASEALIGN(sg->sg_Bytes, sg->sg_AlignMask);
5325 sg->sg_Flags &= ~SGF_RESOLVING;
5326 sg->sg_Flags |= SGF_RESOLVED;
5329 * If no dynamic methods and no dynamic initialization or destruction
5330 * required, set dyncount to 0.
5332 if (dyncount == 2 &&
5333 (sg->sg_Flags & SGF_HASASS) == 0 &&
5334 sg->sg_SRBase == NULL &&
5335 sg->sg_CBase == NULL &&
5336 sg->sg_DBase == NULL) {
5339 sg->sg_DynCount = dyncount;
5340 sg->sg_Flags &= ~SGF_RESOLVING;
5344 * SECTION2 - GLOBAL RESOLUTION
5347 if (sg->sg_Flags & SGF_GRESOLVED)
5349 sg->sg_Flags |= SGF_GRESOLVING;
5350 sg->sg_GlobalBytes = 0;
5353 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
5361 case DOP_STACK_STORAGE:
5363 * can't happen. Stack storage is only used in executable
5367 case DOP_ARGS_STORAGE:
5368 case DOP_GROUP_STORAGE:
5370 * Non-globals were handled in section1
5373 case DOP_GLOBAL_STORAGE:
5375 * Global storage is handled in section2
5377 * NOTE: We only process referenced global storage. This will
5378 * include global elements referenced by constructors, which are
5379 * always run even if not specifically referenced.
5383 if ((d->d_Flags & (DF_RESOLVING | DF_RESOLVED)) == 0)
5386 if ((d->d_Flags & DF_RESOLVED) == 0) {
5392 if (ok == 0) /* save some time */
5396 if (sg->sg_GlobalAlignMask < d->d_AlignMask)
5397 sg->sg_GlobalAlignMask = d->d_AlignMask;
5398 sg->sg_GlobalBytes = (sg->sg_GlobalBytes +
5399 d->d_AlignMask) & ~d->d_AlignMask;
5400 d->d_Offset = sg->sg_GlobalBytes;
5401 d->d_Storage = GENSTAT_MEMDEF;
5402 sg->sg_GlobalBytes += d->d_Bytes;
5403 if (d->d_StorDecl.ed_OrigAssExp)
5404 sg->sg_Flags |= SGF_GHASASS;
5406 type = d->d_StorDecl.ed_Type;
5407 if (type->ty_Flags & TF_HASASS)
5408 sg->sg_Flags |= SGF_GHASASS;
5409 if (type->ty_Flags & TF_HASLVREF)
5410 sg->sg_Flags |= SGF_GHASLVPTR;
5411 if (type->ty_Flags & TF_HASCONSTRUCT)
5412 sg->sg_Flags |= SGF_ABICALL;
5413 if (type->ty_Flags & TF_HASDESTRUCT)
5414 sg->sg_Flags |= SGF_ABICALL;
5415 if (type->ty_Flags & TF_HASGCONSTRUCT)
5416 sg->sg_Flags |= SGF_ABICALL;
5417 if (type->ty_Flags & TF_HASGDESTRUCT)
5418 sg->sg_Flags |= SGF_ABICALL;
5421 dassert_semgrp(sg, 0);
5426 * Finish up any refinements. (Effects 'ok'? no for now)
5428 if (d->d_ScopeFlags & SCOPE_REFINE) {
5429 if (d->d_Flags & (DF_RESOLVING | DF_RESOLVED)) {
5430 ResolveDecl(d->d_Super, 0);
5432 RefineDeclaration(sg, d->d_Super, d);
5441 sg->sg_GlobalBytes = (sg->sg_GlobalBytes +
5442 sg->sg_GlobalAlignMask) &
5443 ~sg->sg_GlobalAlignMask;
5444 sg->sg_Flags &= ~SGF_GRESOLVING;
5445 sg->sg_Flags |= SGF_GRESOLVED;
5449 * SECTION3 - Final rollup (future)
5453 if ((sg->sg_Flags & (SGF_RESOLVED | SGF_GRESOLVED)) !=
5454 (SGF_RESOLVED | SGF_GRESOLVED)) {
5459 * This gets hit if Int32Type is resolved before its class.
5460 * This is a big no-no.
5462 if (sg == Int32Type.ty_ClassType.et_SemGroup &&
5465 dpanic("Resolver improperly early-resolved Int32Type\n");
5471 * findExpOper() - Find operator declaration matching expression
5473 * Locate the operator declaration (a DOP_PROCDEF) that matches the
5474 * expression or NULL if no match could be found. The expression's left and
5475 * right hand sides must already be resolved.
5477 * NOTE! A temporary 'copy' Exp may be passed, not all fields are valid.
5479 static Declaration *testIConstantForType(Declaration *d, Type *type, Exp *exp);
5480 static Declaration *testFConstantForType(Declaration *d, Type *type, Exp *exp);
/*
 * NOTE(review): this listing is elided (embedded line numbers jump); the
 * return type line, local declarations, and some braces/returns are on
 * lines not visible in this view.
 */
5484 findExpOper(Exp *exp, int flags)
5490 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
/* binary ops have both sides typed; unary ops only have a lhs */
5492 if (exp->ex_Flags & EXF_BINARY) {
5493 rtype = exp->ex_Rhs->ex_Type;
5494 ltype = exp->ex_Lhs->ex_Type;
5496 dassert(exp->ex_Flags & EXF_UNARY);
5498 ltype = exp->ex_Lhs->ex_Type;
5502 * XXX look in our local semantic hierarchy for a compatible operator ?
5506 * Attempt to find a matching operator from the left hand side type.
5508 d = findOper(ltype, exp->ex_Id, ltype, rtype, flags);
/* unary ops stop here; binary ops fall through to try the rhs type */
5510 if (d || (exp->ex_Flags & EXF_BINARY) == 0)
5514 * Attempt to find a matching binary operator from the right hand side
5517 d = findOper(rtype, exp->ex_Id, ltype, rtype, flags);
5520 * If that fails but either the left or right-hand sides are constants,
5521 * see if we can find an operator by casting the constant to the
/*
 * Four symmetric cases below: an INTEGER or FLOAT constant on one side
 * paired with a non-constant of integer/float type on the other.  The
 * operator is looked up with both sides forced to the non-constant's
 * type, then test[IF]ConstantForType() verifies the constant actually
 * fits in that type before the match is accepted.
 */
5525 if (exp->ex_Rhs->ex_Token == TOK_INTEGER &&
5526 exp->ex_Lhs->ex_Token != TOK_INTEGER &&
5527 exp->ex_Lhs->ex_Token != TOK_FLOAT &&
5528 (ltype->ty_Flags & TF_ISINTEGER)) {
5529 d = findOper(ltype, exp->ex_Id, ltype, ltype, flags);
5531 d = testIConstantForType(d, ltype, exp->ex_Rhs);
5532 } else if (exp->ex_Lhs->ex_Token == TOK_INTEGER &&
5533 exp->ex_Rhs->ex_Token != TOK_INTEGER &&
5534 exp->ex_Rhs->ex_Token != TOK_FLOAT &&
5535 (rtype->ty_Flags & TF_ISINTEGER)) {
5536 d = findOper(rtype, exp->ex_Id, rtype, rtype, flags);
5538 d = testIConstantForType(d, rtype, exp->ex_Lhs);
5539 } else if (exp->ex_Rhs->ex_Token == TOK_FLOAT &&
5540 exp->ex_Lhs->ex_Token != TOK_INTEGER &&
5541 exp->ex_Lhs->ex_Token != TOK_FLOAT &&
5542 (ltype->ty_Flags & TF_ISFLOATING)) {
5543 d = findOper(ltype, exp->ex_Id, ltype, ltype, flags);
5545 d = testFConstantForType(d, ltype, exp->ex_Rhs);
5546 } else if (exp->ex_Lhs->ex_Token == TOK_FLOAT &&
5547 exp->ex_Rhs->ex_Token != TOK_INTEGER &&
5548 exp->ex_Rhs->ex_Token != TOK_FLOAT &&
5549 (rtype->ty_Flags & TF_ISFLOATING)) {
5550 d = findOper(rtype, exp->ex_Id, rtype, rtype, flags);
5552 d = testFConstantForType(d, rtype, exp->ex_Lhs);
5560 * Calculate whether the constant can be safely cast. If it can, cast the
5561 * constant and return d. Otherwise complain and return NULL.
/*
 * testIConstantForType() - verify an integer constant fits in 'type'.
 *
 * Round-trips the 64-bit constant value through the target width (signed
 * or unsigned per TF_ISUNSIGNED) and compares; a mismatch means the value
 * does not fit.  NOTE(review): listing is elided here — the case labels,
 * break/goto statements, and the failure return are on lines not visible
 * in this view.
 */
5565 testIConstantForType(Declaration *d, Type *type, Exp *exp)
5567 int64_t v = resolveGetConstExpInt64(exp);
5569 if (type->ty_Flags & TF_ISUNSIGNED) {
5570 switch (type->ty_Bytes) {
5572 if (v != (int64_t) (uint8_t) v)
5576 if (v != (int64_t) (uint16_t) v)
5580 if (v != (int64_t) (uint32_t) v)
5589 switch (type->ty_Bytes) {
5591 if (v != (int64_t) (int8_t) v)
5595 if (v != (int64_t) (int16_t) v)
5599 if (v != (int64_t) (int32_t) v)
5610 * If successful change the constant's type and reset the interpreter to
/* retype the constant in-place and force re-evaluation by the runner */
5614 exp->ex_Type = type;
5615 exp->ex_Run = RunUnresolvedExp;
5616 exp->ex_Run64 = Run64DefaultExp;
/* failure path: report that the constant cannot be auto-cast */
5618 ExpPrintError(exp, TOK_ERR_AUTOCAST_VALUE);
/*
 * testFConstantForType() - floating-point counterpart of
 * testIConstantForType(): verify the 128-bit constant survives a
 * round-trip through the target float width (4 or 8 bytes).
 * NOTE(review): elided listing — case labels and the failure return are
 * not visible in this view.
 */
5625 testFConstantForType(Declaration *d, Type *type, Exp *exp)
5627 float128_t v = resolveGetConstExpFloat128(exp);
5629 switch (type->ty_Bytes) {
5631 if (v != (float32_t) v)
5635 if (v != (float64_t) v)
5643 * If successful change the constant's type and reset the interpreter to
/* retype the constant in-place and force re-evaluation by the runner */
5647 exp->ex_Type = type;
5648 exp->ex_Run = RunUnresolvedExp;
5649 exp->ex_Run64 = Run64DefaultExp;
5651 ExpPrintError(exp, TOK_ERR_AUTOCAST_VALUE);
/*
 * findOper() - look up operator 'id' on base type 'btype' matching the
 * given operand types.  rtype == NULL means a unary operator (1 arg).
 * NOTE(review): elided listing — returns and closing braces are on lines
 * not visible in this view.
 */
5658 findOper(Type *btype, runeid_t id, Type *ltype, Type *rtype, int flags)
5662 int args = (rtype != NULL) ? 2 : 1;
5664 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
5667 * Locate the base type. If the base type does not have a SemGroup there
5668 * are no operators. (XXX put system operators here)
5670 sg = BaseType(&btype);
5676 * Look for the operator in the SemGroup
5678 * TODO - For reasons currently unknown, complex internal operators
5679 * in the Pointer and Reference class (and probably others)
5680 * are not able to completely match if we do not pre-resolve
5681 * all procedural declarations before looking for matches.
5682 * It is unclear why this is the case.
/* workaround: pre-resolve every DOP_PROC declaration in the group */
5685 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
5686 if (d->d_MyGroup == sg && d->d_Op == DOP_PROC) {
/* scan the operator-id hash chain for an exact type match */
5691 for (d = FindOperId(sg, id, args); d; d = d->d_ONext) {
5693 if (d->d_MyGroup == sg &&
5694 d->d_Op == DOP_PROC &&
5695 d->d_ProcDecl.ed_OperId == id &&
5696 MatchOperatorTypes(d, ltype, rtype))
5703 * Failed. If the base type is a compound type, look for the operator in
5704 * the SemGroup for each element making up the compound type. e.g. so
5705 * (mycustomtype, double) would find the operator in mycustomtype.
5707 if (btype->ty_Op == TY_COMPOUND) {
5708 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
/* recurse into each element's underlying type */
5710 if (d->d_Op & DOPF_STORAGE) {
5711 d2 = findOper(d->d_StorDecl.ed_Type, id,
5712 ltype, rtype, flags);
5713 } else if (d->d_Op == DOP_TYPEDEF) {
5714 d2 = findOper(d->d_TypedefDecl.ed_Type, id,
5715 ltype, rtype, flags);
/*
 * errorDottedId() - print a printf-style error message to stderr followed
 * by the dotted identifier path in 'ary' (terminated by a 0 runeid).
 * NOTE(review): elided listing — the va_list declaration, va_start(),
 * va_end(), and loop-index declaration are on lines not visible here.
 */
5727 errorDottedId(runeid_t *ary, const char *ctl,...)
5729 char buf[RUNE_IDTOSTR_LEN];
/* caller-supplied message first ... */
5734 vfprintf(stderr, ctl, va);
/* ... then ": first.second.third" built from the id array */
5736 fprintf(stderr, ": %s", runeid_text(ary[0], buf));
5737 for (i = 1; ary[i]; ++i)
5738 fprintf(stderr, ".%s", runeid_text(ary[i], buf));
5739 fprintf(stderr, "\n");
5743 * Resolve the alignment requirements for SemGroups related to statements,
5744 * including the alignment requirements needed for temporary expression
/*
 * ResolveAlignment() - recursively compute sg_AlignMask /
 * sg_GlobalAlignMask / sg_TmpAlignMask for the statement tree rooted at
 * 'st'.  Idempotent via STF_ALIGNRESOLVED.
 * NOTE(review): elided listing — case labels, braces, and some statements
 * are on lines not visible in this view.
 */
5749 ResolveAlignment(Stmt *st, int flags)
5751 SemGroup *sg = st->st_MyGroup;
/* run once per statement */
5754 if (st->st_Flags & STF_ALIGNRESOLVED)
5756 st->st_Flags |= STF_ALIGNRESOLVED;
5759 * If this is an executable semantic layer or an import layer then assign
5760 * storage to declarations up-front. Of the various DOP_*_STORAGE ops,
5761 * we should only see DOP_STACK_STORAGE and DOP_GLOBAL_STORAGE.
5763 * Note: if this is the root ST_Import STF_SEMANTIC is *NOT* set and sg
5766 if ((st->st_Flags & STF_SEMANTIC) && st->st_Op != ST_Class) {
5770 * Pre-scan for alignment. Don't try to propagate the alignment to
5771 * the parent for now as that would require recalculating the
5774 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
5776 case DOP_STACK_STORAGE:
5777 case DOP_ARGS_STORAGE:
5778 case DOP_GROUP_STORAGE:
/* local storage widens the group's alignment mask */
5779 if (sg->sg_AlignMask < d->d_AlignMask)
5780 sg->sg_AlignMask = d->d_AlignMask;
5782 case DOP_GLOBAL_STORAGE:
/* globals track a separate alignment mask */
5783 if (sg->sg_GlobalAlignMask < d->d_AlignMask)
5784 sg->sg_GlobalAlignMask = d->d_AlignMask;
/* per-statement-type handling of temporary-expression alignment */
5792 switch (st->st_Op) {
5800 if (st->st_TypedefStmt.es_Decl->d_Flags & DF_RESOLVED) {
5801 resolveDeclAlign(st->st_TypedefStmt.es_Decl,
5802 &sg->sg_TmpAlignMask,
5808 * NOTE: Don't calculate for declarations that belong in a different
5815 d = st->st_DeclStmt.es_Decl;
5817 for (i = 0; i < st->st_DeclStmt.es_DeclCount; ++i) {
5818 if (st->st_MyGroup == d->d_MyGroup &&
5819 (d->d_Flags & DF_RESOLVED)) {
5821 &sg->sg_TmpAlignMask,
5824 d = RUNE_NEXT(d, d_Node);
/* loops: before-condition, after-condition, and advance expression */
5836 if (st->st_LoopStmt.es_BCond) {
5837 resolveExpAlign(st->st_LoopStmt.es_BCond,
5838 &sg->sg_TmpAlignMask,
5841 if (st->st_LoopStmt.es_ACond) {
5842 resolveExpAlign(st->st_LoopStmt.es_ACond,
5843 &sg->sg_TmpAlignMask,
5846 if (st->st_LoopStmt.es_AExp) {
5847 resolveExpAlign(st->st_LoopStmt.es_AExp,
5848 &sg->sg_TmpAlignMask,
5858 resolveExpAlign(st->st_IfStmt.es_Exp,
5859 &sg->sg_TmpAlignMask,
5863 if (st->st_RetStmt.es_Exp)
5864 resolveExpAlign(st->st_RetStmt.es_Exp,
5865 &sg->sg_TmpAlignMask,
5869 if (st->st_ResStmt.es_Exp)
5870 resolveExpAlign(st->st_ResStmt.es_Exp,
5871 &sg->sg_TmpAlignMask,
5876 * The switch expression's temporary data must be saved while we are
5877 * executing the sub-statements (the cases).
5879 resolveExpAlign(st->st_SwStmt.es_Exp,
5880 &sg->sg_TmpAlignMask,
5884 if (st->st_CaseStmt.es_Exp)
5885 resolveExpAlign(st->st_CaseStmt.es_Exp,
5886 &sg->sg_TmpAlignMask,
5890 resolveExpAlign(st->st_ExpStmt.es_Exp,
5891 &sg->sg_TmpAlignMask,
5894 case ST_ThreadSched:
/* unexpected statement op */
5897 dassert_stmt(st, 0);
5901 * Calculate storage requirements for substatements. offset acts as our
5902 * base. We union the storage for the substatements together. Note that
5903 * often scan->sg_MyGroup == sg.
5905 RUNE_FOREACH(scan, &st->st_List, st_Node) {
5906 if (scan->st_Op == ST_Class) {
5907 if (scan->u.ClassStmt.es_Decl->d_Flags & DF_RESOLVED)
5908 ResolveAlignment(scan, flags);
5909 } else if (scan->st_Op == ST_Decl &&
5910 scan->st_DeclStmt.es_Decl->d_MyGroup !=
5916 } else if (scan->st_Op == ST_Decl &&
5917 (scan->st_DeclStmt.es_Decl->d_Flags & DF_RESOLVED)) {
5919 * See prior comments, skip declarations that were moved to
5922 * (already resolved so can use junk offsets)
5924 resolveDeclAlign(scan->st_DeclStmt.es_Decl,
5925 &sg->sg_TmpAlignMask,
5927 } else if (scan->st_Op == ST_Proc &&
5928 scan->st_ProcStmt.es_Decl->d_ProcDecl.ed_OrigBody == scan)
5930 /* Do not resolve template procedures! */
5931 } else if (scan->st_Flags & STF_SEMTOP) {
5932 ResolveAlignment(scan, flags);
5934 ResolveAlignment(scan, flags);
5939 * If this is a new semantic level call resolveStorageSemGroup() to do
5940 * the final cleanup of SemGroup issues. This will redundantly calculate
5941 * temporary space requirements. Also, due to type/class references the
5942 * temporary space for a class may have already been resolved. Since a
5943 * class can only contain declarations it had better match what we
5946 * Note that for non-Class executable SemGroup's TmpBytes is incorporated
5947 * in a downward fashion while sg_Bytes is incorporated in an upward
5948 * fashion. It can become quite confusing. Don't ask me why I did it
5951 if (st->st_Flags & STF_SEMANTIC) {
5952 if ((sg->sg_Flags & SGF_TMPRESOLVED) == 0) {
5953 resolveSemGroupAlign(sg, flags);
5958 * Propagate alignment requirements upward.
/* only for nested (non-top) semantic levels */
5960 if ((st->st_Flags & (STF_SEMANTIC | STF_SEMTOP)) == STF_SEMANTIC) {
5961 if (sg->sg_Parent->sg_AlignMask < sg->sg_AlignMask)
5962 sg->sg_Parent->sg_AlignMask = sg->sg_AlignMask;
5963 if (sg->sg_Parent->sg_TmpAlignMask < sg->sg_TmpAlignMask)
5964 sg->sg_Parent->sg_TmpAlignMask = sg->sg_TmpAlignMask;
5969 * ResolveStorage() - Final storage resolution pass
5971 * This pass carefully scans the SemGroup hierarchy and assigns offsets to
5974 * PROCEDURES - all the various 'executable' semantic layers in a procedure
5975 * are collapsed together for efficiency, so we only have to manage one
5976 * context. This means that the d_Offset assigned to declarations in
5977 * sub-blocks may exceed the sg_ size of the sub-block's SemGroup. We do not
5978 * attempt to resolve procedure body templates (d_ProcDecl.ed_OrigBody).
5980 * CLASSES - are given offsets in their SemGroup's relative to 0, if not
5983 * IMPORTS - are given offsets in their SemGroup's relative to 0
5985 * COMPOUND TYPES - (such as procedure arguments) are given offsets in their
5986 * SemGroup's relative to 0.
5988 * TEMPORARY STORAGE - expressions may require temporary storage for
5989 * intermediate results. That space is reserved here.
5991 * We specifically do not resolve unrelated storage.
/*
 * NOTE(review): elided listing — declarations, case labels, braces, and
 * some statements are on lines not visible in this view.  Requires that
 * ResolveAlignment() already ran on this subtree (see STF_ALIGNRESOLVED
 * check below).
 */
5995 ResolveStorage(Stmt *st, int flags)
6001 SemGroup *sg = st->st_MyGroup;
/* classes may legitimately be re-entered; other stmts must not be */
6006 if (st->st_Op != ST_Class) {
6007 dassert((st->st_Flags & STF_RESOLVING) == 0);
6008 if (st->st_Flags & STF_RESOLVED) {
6011 st->st_Flags |= STF_RESOLVING;
6014 if ((st->st_Flags & STF_ALIGNRESOLVED) == 0)
6016 dassert((st->st_Flags & STF_TMPRESOLVED) == 0);
6017 if (st->st_Flags & STF_TMPRESOLVED)
6019 st->st_Flags |= STF_TMPRESOLVED;
6022 * If this is an executable semantic layer or an import layer then assign
6023 * storage to declarations up-front. Of the various DOP_*_STORAGE ops,
6024 * we should only see DOP_STACK_STORAGE and DOP_GLOBAL_STORAGE.
6026 * Note: if this is the root ST_Import STF_SEMANTIC is *NOT* set and sg
6029 if ((st->st_Flags & STF_SEMANTIC) && st->st_Op != ST_Class) {
6032 dassert((sg->sg_Flags & (SGF_FRESOLVED | SGF_FRESOLVING)) == 0);
6034 sg->sg_Flags |= SGF_FRESOLVING;
6037 * The base offset for sub-semantic-blocks must match the alignment
6038 * they require in order to allow us to do an aligned BZEROing of the
6039 * space. We do not include the temporary space here (it does not
6040 * need to be BZERO'd).
6042 * NOTE: sg_TmpAlignMask is taken into account when the top-level
6043 * frame is allocated.
6045 if (st->st_Flags & STF_SEMTOP) {
6046 dassert(sg->sg_Bytes == 0);
/* nested block: start after the parent's (aligned) storage */
6049 base = BASEALIGN(sg->sg_Parent->sg_Bytes,
6053 sg->sg_BlkOffset = base;
6056 * Classify storage (note: class decls are handled elsewhere)
6058 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
6060 * Set d_Storage based on scope and intended default for d_Op.
6062 if (d->d_ScopeFlags & SCOPE_UNTRACKED) {
6063 d->d_Storage = GENSTAT_NONE;
6064 } else if (d->d_ScopeFlags & SCOPE_UNLOCKED) {
6065 d->d_Storage = GENSTAT_REFD;
6066 } else if (d->d_ScopeFlags & SCOPE_SOFT) {
6067 d->d_Storage = GENSTAT_LOCK;
6068 } else if (d->d_ScopeFlags & SCOPE_HARD) {
6069 d->d_Storage = GENSTAT_LOCKH;
6072 case DOP_STACK_STORAGE:
6073 d->d_Storage = GENSTAT_STKDEF;
6075 case DOP_ARGS_STORAGE:
6076 d->d_Storage = GENSTAT_ARGDEF;
6078 case DOP_GROUP_STORAGE:
6079 d->d_Storage = GENSTAT_MEMDEF;
6081 case DOP_GLOBAL_STORAGE:
6082 d->d_Storage = GENSTAT_MEMDEF;
/* assign offsets and accumulate per-group flags from the type */
6088 case DOP_STACK_STORAGE:
6089 case DOP_ARGS_STORAGE:
6090 case DOP_GROUP_STORAGE:
6091 type = d->d_StorDecl.ed_Type;
6092 base = BASEALIGN(base, d->d_AlignMask);
6096 if (d->d_StorDecl.ed_OrigAssExp)
6097 sg->sg_Flags |= SGF_HASASS;
6098 if (type->ty_Flags & TF_HASASS)
6099 sg->sg_Flags |= SGF_HASASS;
6100 if (type->ty_Flags & TF_HASLVREF)
6101 sg->sg_Flags |= SGF_HASLVREF;
6102 if (type->ty_Flags & TF_HASCONSTRUCT)
6103 sg->sg_Flags |= SGF_ABICALL;
6104 if (type->ty_Flags & TF_HASDESTRUCT)
6105 sg->sg_Flags |= SGF_ABICALL;
6106 if (type->ty_Flags & TF_HASGCONSTRUCT)
6107 sg->sg_Flags |= SGF_ABICALL;
6108 if (type->ty_Flags & TF_HASGDESTRUCT)
6109 sg->sg_Flags |= SGF_ABICALL;
6111 case DOP_GLOBAL_STORAGE:
6112 type = d->d_StorDecl.ed_Type;
6113 sg->sg_GlobalBytes = BASEALIGN(
6116 d->d_Offset = sg->sg_GlobalBytes;
6117 sg->sg_GlobalBytes += d->d_Bytes;
6118 if (d->d_StorDecl.ed_OrigAssExp)
6119 sg->sg_Flags |= SGF_GHASASS;
6120 if (type->ty_Flags & TF_HASASS)
6121 sg->sg_Flags |= SGF_GHASASS;
6122 if (type->ty_Flags & TF_HASLVREF)
6123 sg->sg_Flags |= SGF_GHASLVPTR;
6124 if (type->ty_Flags & TF_HASCONSTRUCT)
6125 sg->sg_Flags |= SGF_ABICALL;
6126 if (type->ty_Flags & TF_HASDESTRUCT)
6127 sg->sg_Flags |= SGF_ABICALL;
6128 if (type->ty_Flags & TF_HASGCONSTRUCT)
6129 sg->sg_Flags |= SGF_ABICALL;
6130 if (type->ty_Flags & TF_HASGDESTRUCT)
6131 sg->sg_Flags |= SGF_ABICALL;
6139 * The byte size of the block does not have to be aligned, but
6140 * aligning it (within reason) might provide a benefit.
6142 sg->sg_Bytes = base;
/* cap alignment-based padding at 256 to avoid pathological masks */
6146 if (sg->sg_AlignMask < 256) {
6147 sg->sg_Bytes = BASEALIGN(base, sg->sg_AlignMask);
6149 if (sg->sg_GlobalAlignMask < 256) {
6150 sg->sg_GlobalBytes = BASEALIGN(sg->sg_GlobalBytes,
6151 sg->sg_GlobalAlignMask);
6154 sg->sg_BlkBytes = sg->sg_Bytes - sg->sg_BlkOffset;
6155 sg->sg_Flags |= SGF_FRESOLVED;
6156 sg->sg_Flags &= ~SGF_FRESOLVING;
6160 * Figure out how much temporary space we need to be able to execute
6161 * statements and expressions. Temporary space, like the main procedural
6162 * space, must be inherited from and consolidated into the top-level
6166 base = sg->sg_TmpBytes;
6167 gbase = sg->sg_GlobalTmpBytes;
6170 * Root ST_Import. avoid compiler warnings
/* per-statement-type temporary storage resolution */
6178 switch (st->st_Op) {
6180 if (st->st_ImportStmt.es_DLL) {
/* NOTE(review): dlsym() result presumably invoked as a DLL init hook;
 * the call site is on lines not visible in this view. */
6181 void (*func) (void)= dlsym(st->st_ImportStmt.es_DLL,
6191 if (st->st_TypedefStmt.es_Decl->d_Flags & DF_RESOLVED) {
6192 resolveDeclStorage(st->st_TypedefStmt.es_Decl,
6193 base, &limit, gbase, &glimit);
6198 * Temporary space for declarations are handled here.
6200 * Resolve declarations, skipping any whos context was moved to a
6201 * class (e.g. a declaration at the top level of a file like
6202 * Fd.setfd(...) also exists in the Fd class).
6208 d = st->st_DeclStmt.es_Decl;
6210 if (d->d_Op == DOP_GLOBAL_STORAGE)
6211 st->st_DeclStmt.es_TmpOffset = gbase;
6213 st->st_DeclStmt.es_TmpOffset = base;
6214 for (i = 0; i < st->st_DeclStmt.es_DeclCount; ++i) {
6216 if (st->st_MyGroup != d->d_MyGroup) {
6217 /* printf("SKIPB %s\n", d->d_Id) */ ;
6219 * resolveDeclStorage(d, base, &limit, gbase, &glimit);
6221 } else if (d->d_Flags & DF_RESOLVED) {
6222 resolveDeclStorage(d, base, &limit,
6227 resolveDeclStorage(d, base, &limit,
6230 d = RUNE_NEXT(d, d_Node);
6242 if (st->st_LoopStmt.es_BCond) {
6243 resolveStorageExp(st->st_LoopStmt.es_BCond,
6246 if (st->st_LoopStmt.es_ACond) {
6247 resolveStorageExp(st->st_LoopStmt.es_ACond,
6250 if (st->st_LoopStmt.es_AExp) {
6251 resolveStorageExp(st->st_LoopStmt.es_AExp,
6261 resolveStorageExp(st->st_IfStmt.es_Exp, base, &limit);
6264 if (st->st_RetStmt.es_Exp)
6265 resolveStorageExp(st->st_RetStmt.es_Exp, base, &limit);
6268 if (st->st_ResStmt.es_Exp)
6269 resolveStorageExp(st->st_ResStmt.es_Exp, base, &limit);
6273 * The switch expression's temporary data must be saved while we are
6274 * executing the sub-statements (the cases).
6277 urunesize_t xlimit = base;
6278 resolveStorageExp(st->st_SwStmt.es_Exp, base, &xlimit);
6285 if (st->st_CaseStmt.es_Exp)
6286 resolveStorageExp(st->st_CaseStmt.es_Exp, base, &limit);
6289 resolveStorageExp(st->st_ExpStmt.es_Exp, base, &limit);
6291 case ST_ThreadSched:
6294 dassert_stmt(st, 0);
6298 * Calculate storage requirements for substatements. (base) may have
6299 * been adjusted if this statement level's temporary storage needs to be
6300 * retained (aka switch() expression).
6302 * Note that often scan->sg_MyGroup == sg.
6304 RUNE_FOREACH(scan, &st->st_List, st_Node) {
6305 dassert(scan->st_Op != ST_Proc);
6306 if (scan->st_Op == ST_Class) {
6307 ResolveStorage(scan, flags);
6308 } else if (scan->st_Op == ST_Decl) {
6310 * Ignore declarations here, they will be handled in the semgroup
6311 * scan in the next loop
6313 } else if (scan->st_Op == ST_Proc) {
6314 /* Do not resolve template procedures! */
/* NOTE(review): unconditional fprintf below looks like leftover debug
 * output; also this branch is unreachable given the dassert above. */
6315 char buf[RUNE_IDTOSTR_LEN];
6316 fprintf(stderr, "STORAGE %s\n",
6317 runeid_text(scan->st_ProcStmt.es_Decl->d_Id, buf));
6318 if (scan->st_ProcStmt.es_Decl->d_ProcDecl.ed_OrigBody == scan) {
6323 } else if (scan->st_Flags & STF_SEMTOP) {
6324 assert(scan->st_MyGroup != sg);
6325 ResolveStorage(scan, flags);
6328 * This is a bit of a mess. The baseline sg_TmpBytes needs to be
6329 * set so calculated temporary offsets are relative to it, and
6330 * then restored. Otherwise we might blow away the
6331 * SGF_TMPRESOLVED SemGroup
6335 urunesize_t save_offset;
6336 urunesize_t save_goffset;
6338 save_offset = scan->st_MyGroup->sg_TmpBytes;
6339 save_goffset = scan->st_MyGroup->sg_GlobalTmpBytes;
6340 scan->st_MyGroup->sg_TmpBytes = base;
6341 scan->st_MyGroup->sg_GlobalTmpBytes = gbase;
6342 ResolveStorage(scan, flags);
/* restore baselines, then fold the child's needs into our limits */
6344 if (scan->st_MyGroup->sg_TmpBytes < save_offset)
6345 scan->st_MyGroup->sg_TmpBytes = save_offset;
6346 if (scan->st_MyGroup->sg_GlobalTmpBytes < save_goffset) {
6347 scan->st_MyGroup->sg_GlobalTmpBytes = save_goffset;
6349 if (limit < scan->st_MyGroup->sg_TmpBytes)
6350 limit = scan->st_MyGroup->sg_TmpBytes;
6351 if (glimit < scan->st_MyGroup->sg_GlobalTmpBytes)
6352 glimit = scan->st_MyGroup->sg_GlobalTmpBytes;
6357 * If this is a new semantic level call resolveStorageSemGroup() to do
6358 * the final cleanup of SemGroup issues. This will redundantly calculate
6359 * temporary space requirements. Also, due to type/class references the
6360 * temporary space for a class may have already been resolved. Since a
6361 * class can only contain declarations it had better match what we
6364 * Note that for non-Class executable SemGroup's TmpBytes is incorporated
6365 * in a downward fashion while sg_Bytes is incorporated in an upward
6366 * fashion. It can become quite confusing. Don't ask me why I did it
6369 if (st->st_Flags & STF_SEMANTIC) {
6370 if ((sg->sg_Flags & SGF_TMPRESOLVED) == 0) {
6371 resolveStorageSemGroup(sg, limit, &limit,
6374 dassert(sg->sg_TmpBytes == limit &&
6375 sg->sg_GlobalTmpBytes == glimit);
6378 sg->sg_TmpBytes = limit;
6379 sg->sg_GlobalTmpBytes = glimit;
6380 } /* else this is the Root st_Import */
/* propagate collapsed frame size upward for nested semantic levels */
6382 if ((st->st_Flags & (STF_SEMANTIC | STF_SEMTOP)) == STF_SEMANTIC) {
6383 dassert(sg->sg_Parent->sg_Bytes <= sg->sg_Bytes);
6384 sg->sg_Parent->sg_Bytes = sg->sg_Bytes;
6389 * resolveDeclStorage() - resolve the storage reservation required to process
6392 * This is an expression tree traversal storage resolution procedure. We have
6393 * to traverse through declarations to get to default assignments and such.
6395 * If a declaration has no assigned default the underlying type may itself
6396 * have an assigned default which must be dealt with.
/*
 * resolveDeclAlign() - accumulate into *expalignp the alignment needed to
 * evaluate declaration 'd' (its type plus any default-assignment
 * expression).  RESOLVE_CLEAN clears prior results so the pass can re-run.
 * NOTE(review): elided listing — the switch statement, case labels, and
 * braces are on lines not visible in this view.
 */
6399 resolveDeclAlign(Declaration *d, urunesize_t *expalignp, int flags)
6401 if (flags & RESOLVE_CLEAN) {
6402 if ((d->d_Flags & DF_ALIGNRESOLVE) == 0)
6404 d->d_Flags &= ~(DF_ALIGNRESOLVE | DF_TMPRESOLVED);
/* already computed: just merge the cached mask and return */
6406 if (d->d_Flags & DF_ALIGNRESOLVE) {
6407 if (*expalignp < d->d_AlignMask)
6408 *expalignp = d->d_AlignMask;
6411 d->d_Flags |= DF_ALIGNRESOLVE;
6416 /* recursion already dealt with */
6418 case DOP_ARGS_STORAGE:
6419 case DOP_STACK_STORAGE:
6420 case DOP_GROUP_STORAGE:
6422 Type *type = d->d_StorDecl.ed_Type;
6424 resolveTypeAlign(type, expalignp, flags);
6425 if (d->d_StorDecl.ed_AssExp) {
6426 resolveExpAlign(d->d_StorDecl.ed_AssExp, expalignp, flags);
6430 case DOP_GLOBAL_STORAGE:
6432 Type *type = d->d_StorDecl.ed_Type;
6434 resolveTypeAlign(type, expalignp, flags);
6435 if (d->d_StorDecl.ed_AssExp) {
6436 resolveExpAlign(d->d_StorDecl.ed_AssExp, expalignp, flags);
6442 * Never try to resolve storage considerations for an alias's
6443 * assignment in the declaration itself. The run-time context
6444 * depends on who and how many other parts of the program reference
6445 * the alias and the expression tree will be duplicated for each.
/* NOTE(review): 'resolveStorageExpExp' is not declared in the visible
 * prototypes — confirm this is the intended helper and not a typo. */
6448 resolveStorageExpExp(d->d_AliasDecl.ed_AssExp, expalignp);
6452 /* XXX what about ty_AssExp ? should be in global space */
6455 /* recursion already dealt with */
6459 * Resolution of procedure declarations might have been deferred (see
6460 * TOK_ID in ResolveExp()).
6462 /* ResolveDecl(d, 0); */
6466 if ((st = d->d_ProcDecl.ed_ProcBody) != NULL) {
6467 ResolveAlignment(st, 0);
/*
 * resolveDynamicDeclAlign() - apply resolveDeclAlign() across 'd' and the
 * whole d_SubBase/d_SubNext refinement chain, then recurse one level
 * deeper for entries that have their own sub-chains.
 * NOTE(review): elided listing — the flag test at 6484 is truncated; its
 * full condition is not visible in this view.
 */
6478 resolveDynamicDeclAlign(Declaration *d, urunesize_t *expalignp, int flags)
6482 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
6483 if (scan->d_MyGroup &&
6484 (scan->d_MyGroup->sg_Flags & (SGF_RESOLVING |
6486 resolveDeclAlign(scan, expalignp, flags);
6489 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
6490 if (scan->d_SubBase)
6491 resolveDynamicDeclAlign(scan, expalignp, flags);
/*
 * resolveDeclStorage() - reserve temporary storage needed to process
 * declaration 'd'.  Local storage grows from 'base' toward *limitp;
 * global storage uses the gbase/glimitp pair.  Requires a prior
 * resolveDeclAlign() pass (asserted below).
 * NOTE(review): elided listing — the switch statement, case labels, and
 * braces are on lines not visible in this view.
 */
6496 resolveDeclStorage(Declaration * d,
6497 urunesize_t base, urunesize_t *limitp,
6498 urunesize_t gbase, urunesize_t *glimitp)
6500 dassert(d->d_Flags & DF_ALIGNRESOLVE);
6501 if (d->d_Flags & DF_TMPRESOLVED)
6503 d->d_Flags |= DF_TMPRESOLVED;
6507 /* recursion already dealt with */
6509 case DOP_ARGS_STORAGE:
6510 case DOP_STACK_STORAGE:
6511 case DOP_GROUP_STORAGE:
6513 Type *type = d->d_StorDecl.ed_Type;
/* local (frame) space for the type and any default assignment */
6515 resolveStorageType(type, 0, base, limitp);
6516 if (d->d_StorDecl.ed_AssExp) {
6517 resolveStorageExp(d->d_StorDecl.ed_AssExp, base, limitp);
6521 case DOP_GLOBAL_STORAGE:
6523 Type *type = d->d_StorDecl.ed_Type;
/* global space variant */
6525 resolveStorageType(type, 1, gbase, glimitp);
6526 if (d->d_StorDecl.ed_AssExp) {
6527 resolveStorageExp(d->d_StorDecl.ed_AssExp, gbase, glimitp);
6533 * Never try to resolve storage considerations for an alias's
6534 * assignment in the declaration itself. The run-time context
6535 * depends on who and how many other parts of the program reference
6536 * the alias and the expression tree will be duplicated for each.
6539 if (d->d_ScopeFlags & SCOPE_GLOBAL)
6540 resolveStorageExp(d->d_AliasDecl.ed_AssExp, NULL, NULL);
6542 resolveStorageExp(d->d_AliasDecl.ed_AssExp, NULL, NULL);
6546 /* XXX what about ty_AssExp ? should be in global space */
6549 /* recursion already dealt with */
6555 if ((st = d->d_ProcDecl.ed_ProcBody) != NULL) {
6556 ResolveStorage(st, 0);
6566 * Make this temporary for now so we can re-run it.
6568 d->d_Flags &= ~DF_TMPRESOLVED;
/*
 * resolveDynamicDeclStorage() - storage counterpart of
 * resolveDynamicDeclAlign(): apply resolveDeclStorage() across the
 * d_SubBase/d_SubNext refinement chain, then recurse into entries with
 * their own sub-chains.
 * NOTE(review): elided listing — the flag test at 6582 is truncated; its
 * full condition is not visible in this view.
 */
6574 resolveDynamicDeclStorage(Declaration * d,
6575 urunesize_t base, urunesize_t *limitp,
6576 urunesize_t gbase, urunesize_t *glimitp)
6580 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
6581 if (scan->d_MyGroup &&
6582 (scan->d_MyGroup->sg_Flags & (SGF_RESOLVING |
6584 resolveDeclStorage(scan, base, limitp, gbase, glimitp);
6587 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
6588 if (scan->d_SubBase) {
6589 resolveDynamicDeclStorage(scan, base, limitp,
6597 * resolveStorageExpOnly()
6599 * Resolve temporary storage for this exp structure, do not recurse
6600 * sub-expressions. Any type-temporary storage is tacked onto the end of
6601 * this expression's temporary area.
6603 * We do not need to assign storage for expressions which return lvalues,
6604 * because they will simply return a pointer into non-temporary storage.
/*
 * NOTE(review): elided listing — declarations, case labels, and braces
 * are on lines not visible in this view.  Negative ex_TmpOffset values
 * appear to be sentinels (-2 lvalue / -3 no-type); -1 presumably means
 * "unassigned" given the dassert at the end — confirm against full file.
 */
6607 resolveStorageExpOnly(Exp *exp, urunesize_t base, urunesize_t *limitp)
6612 * Stop if the expression resolves to a type rather then a value, e.g.
6613 * when you do something like switch (typeof(int)) { ... } Types are
6614 * handled as thin pointers.
6616 exp->ex_Flags |= EXF_TMPRESOLVED;
6617 if (exp->ex_Flags & EXF_RET_TYPE) {
/* types are represented as raw pointers in temporary space */
6618 exp->ex_TmpOffset = BASEALIGN(base, RAWPTR_ALIGN);
6619 SIZELIMIT(base, sizeof(void *), limitp);
6626 if (d->d_Flags & DF_RESOLVED) {
6627 resolveDeclStorage(d, base, limitp, base, limitp);
6632 * Assign temporary offset. This offset does not overlap temporary space
6633 * reserved for sub-expressions.
6635 * We must have an assigned type. Expression sequences like:
6636 * 'module.blah' are collapsed into 'blah' long before we get here, or
6637 * they should be. We should not encounter any TOK_TCMV_ID expression
6638 * tokens. Structural id's (the right hand side of X.Y) are resolved by
6639 * their parent expression node and no typing or temporary space is
6642 * Expressions that return lvalues do not need temporary space.
6644 type = exp->ex_Type;
6646 switch (exp->ex_Token) {
6651 printf("EXP %p %04x %p\n",
6652 exp, exp->ex_Token, exp->ex_Decl);
6653 dassert_exp(exp, 0);
6656 exp->ex_TmpOffset = -3;
6657 } else if (type->ty_SQFlags & SF_LVALUE) {
6659 * Expressive elements which return lvalues do not get temporary
6660 * space. Note that this also prevents lvalues such as large arrays
6661 * (int ary[999999999]) from reserving unnecessary stack space.
6663 * NOTE: SF_LVALUE is unrelated to SCOPE_LVALUE. SCOPE_LVALUE
6664 * applies to SemGroup storage (LValueStor). SF_LVALUE merely flags
6665 * the type for an expression as expecting or not expecting an
6670 * XXX removeme, LValueStor only applies to semgroups
/* NOTE(review): 'ulvmask' is declared but 'lvmask'/'offset' are used
 * below — likely dead code slated for removal per the XXX above;
 * confirm whether this region is compiled (e.g. inside #if 0). */
6672 runesize_t ulvmask = sizeof(LValueStor) - 1;
6673 *offset = (*offset + lvmask) & ~lvmask;
6674 exp->ex_TmpOffset = *offset;
6675 *offset = *offset + (lvmask + 1);
6677 exp->ex_TmpOffset = -2;
6680 * Reserve temporary space for potential intermediate results.
6682 * Compound expressions may need extra space to default-init the
6683 * compound value, it is expected to be available to the generator
6684 * right after the nominal type in the TmpOffset. XXX also make
6685 * available to the interpreter?
6687 * Procedure calls also may need extra space to default-init the
6688 * return value. XXX also make available to the interpreter?
6690 base = BASEALIGN(base, type->ty_AlignMask);
6693 * It may be convenient to use a larger alignment for arrays, which
6694 * would allow (e.g.) %xmm registers to be used on 64-bit arrays for
6695 * moves. Limit to 16-byte alignment for now.
6697 * (See also resolveExpAlign())
6699 if (type->ty_Op == TY_ARYOF || type->ty_Op == TY_COMPOUND ||
6700 type->ty_Op == TY_ARGS) {
6701 if (type->ty_Bytes >= 16) {
6702 base = BASEALIGN(base, 15);
6703 } else if (type->ty_Bytes >= 8) {
6704 base = BASEALIGN(base, 7);
6705 } else if (type->ty_Bytes >= 4) {
6706 base = BASEALIGN(base, 3);
6711 * Temporary storage for this exp
6713 exp->ex_TmpOffset = base;
6714 SIZELIMIT(base, type->ty_Bytes, limitp);
6717 * A compound expression's type may need additional temporary
6718 * storage. NOTE: The type might not yet be changed to TY_COMPOUND,
6719 * but single-element compounds will use the same temporary space as
6722 * A procedure call may need additional temporary storage.
6724 * (base was adjusted above and is exp->ex_TmpOffset)
6726 if (exp->ex_Token == TOK_COMPOUND) {
6728 * NOTE: type might not yet be changed to compound, but
6729 * single-element compound will use the same temporary space.
6731 resolveStorageType(type, 0, base + type->ty_Bytes, limitp);
6732 } else if (exp->ex_Token == TOK_CALL) {
6733 resolveStorageType(type, 0, base + type->ty_TmpBytes, limitp);
6736 dassert(exp->ex_TmpOffset != -1);
6740 * Calculate the overlapping temporary space for sub-expression trees.
/*
 * resolveStorageExpSub() - recurse into an expression's children and
 * union/stack their temporary-storage requirements.
 * NOTE(review): elided listing — declarations, braces, and some
 * statements (including an apparent older *offset-based variant around
 * 6786-6801, possibly #if'd out) are on lines not visible in this view.
 */
6743 resolveStorageExpSub(Exp *exp, urunesize_t base, urunesize_t *limitp)
6746 resolveStorageType(exp->ex_Type, 0, base, limitp);
6750 * Make sure resolved declarations have resolved temporary storage for
6751 * assigned expressions. XXX pure test
6753 if (exp->ex_Token == TOK_ID || exp->ex_Token == TOK_CLASSID) {
6757 if (d && (d->d_Flags & DF_RESOLVED)) {
6758 resolveDeclStorage(d, base, limitp,
6761 /* note: UNARY can be set for aliases */
6766 * Calculate the overlapping temporary space for sub-trees.
6768 if (exp->ex_Flags & EXF_BINARY) {
6770 * Ensure lhs's NON-RECURSIVE temporary storage on-return does not
6771 * interfere with rhs's, or vice-versa.
6773 * To do this offset the rhs storage by the non-recursive lhs
6776 resolveStorageExp(exp->ex_Lhs, base, limitp);
6777 if (exp->ex_Lhs->ex_TmpOffset >= 0) {
/* rhs starts past the lhs result so both survive simultaneously */
6778 resolveStorageExp(exp->ex_Rhs,
6779 exp->ex_Lhs->ex_TmpOffset +
6780 exp->ex_Lhs->ex_Type->ty_Bytes,
6783 resolveStorageExp(exp->ex_Rhs, base, limitp);
6786 urunesize_t xoffset;
6787 urunesize_t roffset;
6791 resolveStorageExp(exp->ex_Lhs, &xoffset);
6792 if (*offset < xoffset)
6794 if (exp->ex_Lhs->ex_TmpOffset >= 0) {
6795 xoffset = exp->ex_Lhs->ex_TmpOffset +
6796 exp->ex_Lhs->ex_Type->ty_Bytes;
6800 resolveStorageExp(exp->ex_Rhs, &xoffset);
6801 if (*offset < xoffset)
6804 } else if (exp->ex_Flags & EXF_UNARY) {
6805 resolveStorageExp(exp->ex_Lhs, base, limitp);
6806 dassert_exp(exp, exp->ex_Lhs->ex_Next == NULL);
6807 } else if (exp->ex_Flags & EXF_COMPOUND) {
6809 * Each element will be copied into the compound storage in turn, so
6810 * we can union the temporary storage required for each element.
6814 for (scan = exp->ex_Lhs; scan; scan = scan->ex_Next) {
6815 dassert_exp(scan, scan->ex_Type != NULL);
6816 resolveStorageExp(scan, base, limitp);
6820 if (exp->ex_Token == TOK_CALL) {
6821 resolveDynamicProcedureStorage(exp, base, limitp, base, limitp);
6822 } else if (exp->ex_Token == TOK_INLINE_CALL) {
/* inline call: splice the callee's frame into our temporary space */
6823 Stmt *st = exp->ex_AuxStmt;
6824 SemGroup *sg = st->st_MyGroup;
6826 urunesize_t obytes = sg->sg_Parent->sg_Bytes;
6829 /* dassert((exp->ex_Flags & EXF_DUPEXP) == 0); */
6830 dassert(exp->ex_Flags & EXF_BINARY);
6831 dassert((st->st_Flags & (STF_SEMTOP | STF_SEMANTIC)) ==
6834 printf("%p Resolve inline storage %ld %s (%p)\n", exp,
6835 *offset, exp->ex_Lhs->ex_Decl->d_Id,
6837 printf("ST %p\n", st);
6840 dassert((st->st_Flags & STF_TMPRESOLVED) == 0);
6841 dassert((sg->sg_Flags & SGF_TMPRESOLVED) == 0);
/* callee temp space begins at our current (aligned) limit */
6842 sg->sg_TmpBytes = BASEALIGN(*limitp, sg->sg_TmpAlignMask);
6843 /* sg->sg_Bytes set automatically using parent */
6844 dassert(sg->sg_Parent);
6845 ResolveStorage(st, 0);
6847 printf("%p End resolve (%ld, %ld) (%ld, %ld) (%ld, %ld)\n",
6849 *offset, sg->sg_TmpBytes,
6850 obytes, sg->sg_Bytes,
6851 sg->sg_BlkOffset, sg->sg_BlkBytes);
6853 dassert(*limitp <= sg->sg_TmpBytes);
6854 *limitp = sg->sg_TmpBytes;
6855 /* sg->sg_Parent->sg_Bytes set automatically */
6856 resolveDynamicProcedureStorage(exp, base, limitp, base, limitp);
/*
 * resolveStorageExp() - top-level entry for [re]resolving an
 * expression's temporary storage.
 *
 * NOTE(review): elided extract — lines are missing between the embedded
 * original line numbers.  Visible behavior: resolve this node's own
 * storage first (resolveStorageExpOnly), then recurse into the sub-tree;
 * when the node holds its own temporary (ex_TmpOffset >= 0) the
 * sub-tree apparently starts past this node's ty_Bytes — confirm
 * against the full source.
 */
6861 * [re]resolve temporary storage requirements.
6863 * Currently we do not overlap exp's temporary space with that of the
6866 * WARNING! This may be called more than once if an expression requires
6867 * resolve-time interpretation to generate a constant. In this ex_TmpOffset
6868 * for the sub-chain may be regenerated from 0, and then just the top-level
6869 * (post-constant-resolved) ex_TmpOffset will be restored by the caller.
6872 resolveStorageExp(Exp *exp, urunesize_t base, urunesize_t *limitp)
6874 resolveStorageExpOnly(exp, base, limitp);
6875 if ((exp->ex_Flags & EXF_RET_TYPE) == 0) {
6876 if (exp->ex_TmpOffset >= 0) {
6877 resolveStorageExpSub(exp,
6879 exp->ex_Type->ty_Bytes,
6882 resolveStorageExpSub(exp, base, limitp);
/*
 * resolveExpAlign() - accumulate the worst-case alignment mask required
 * by an expression tree into *expalignp (masks, e.g. 15 for 16-byte
 * alignment, are only ever widened, never narrowed).
 *
 * NOTE(review): elided extract — statements are missing between the
 * embedded original line numbers; do not edit logic from this view.
 */
6888 resolveExpAlign(Exp *exp, urunesize_t *expalignp, int flags)
6892 if (exp->ex_Flags & EXF_RET_TYPE) {
6893 if (*expalignp < RAWPTR_ALIGN)
6894 *expalignp = RAWPTR_ALIGN;
6898 type = exp->ex_Type;
/* LVALUE expressions need at least LVALUESTOR alignment. */
6904 if (type->ty_SQFlags & SF_LVALUE) {
6905 if (*expalignp < LVALUESTOR_ALIGN)
6906 *expalignp = LVALUESTOR_ALIGN;
6908 if (*expalignp < type->ty_AlignMask)
6909 *expalignp = type->ty_AlignMask;
6911 resolveTypeAlign(type, expalignp, flags);
6914 * It may be convenient to use a larger alignment for arrays, which
6915 * would allow (e.g.) %xmm registers to be used on 64-bit arrays for
6916 * moves. Limit to 16-byte alignment for now.
6918 * (See also resolveStorageExpOnly())
6920 if (type->ty_Op == TY_ARYOF || type->ty_Op == TY_COMPOUND ||
6921 type->ty_Op == TY_ARGS) {
/* Size-tiered widening: masks 63/31/15/... chosen by ty_Bytes. */
6923 if (type->ty_Bytes >= 64) {
6924 if (*expalignp < 63)
6926 } else if (type->ty_Bytes >= 32) {
6927 if (*expalignp < 31)
6931 if (type->ty_Bytes >= 16) {
6932 if (*expalignp < 15)
6934 } else if (type->ty_Bytes >= 8) {
6937 } else if (type->ty_Bytes >= 4) {
6948 if (d->d_Flags & DF_RESOLVED) {
6949 resolveDeclAlign(d, expalignp, flags);
6954 * This typically only occurs when the resolver needs to evaluate a
6955 * constant expression. The declaration is typically not resolved at
6958 if (exp->ex_Token == TOK_ID || exp->ex_Token == TOK_CLASSID) {
6963 if (d && (d->d_Flags & DF_RESOLVED)) {
6964 resolveDeclAlign(d, expalignp, flags);
6966 /* note: UNARY can be set for aliases */
6971 * Recurse through for an inline call, then roll-up the alignment
6972 * requirement(s) for the target procedure. We handle the 'arguments'
6973 * and 'return value' alignment in EXF_BINARY below.
6975 if (exp->ex_Token == TOK_CALL) {
6976 resolveDynamicProcedureAlign(exp, expalignp, flags);
6977 } else if (exp->ex_Token == TOK_INLINE_CALL) {
6980 ResolveAlignment(exp->ex_AuxStmt, flags);
6981 asg = exp->ex_AuxStmt->st_MyGroup;
6982 if (*expalignp < asg->sg_TmpAlignMask)
6983 *expalignp = asg->sg_TmpAlignMask;
6984 resolveDynamicProcedureAlign(exp, expalignp, flags);
/* Structural recursion mirrors resolveStorageExpSub(). */
6990 if (exp->ex_Flags & EXF_BINARY) {
6991 resolveExpAlign(exp->ex_Lhs, expalignp, flags);
6992 resolveExpAlign(exp->ex_Rhs, expalignp, flags);
6993 } else if (exp->ex_Flags & EXF_UNARY) {
6994 resolveExpAlign(exp->ex_Lhs, expalignp, flags);
6995 } else if (exp->ex_Flags & EXF_COMPOUND) {
6998 for (scan = exp->ex_Lhs; scan; scan = scan->ex_Next) {
6999 resolveExpAlign(scan, expalignp, flags);
/*
 * resolveTypeAlign() - compute the temporary alignment mask required to
 * initialize a type's defaults, caching it in ty_TmpAlignMask and
 * widening *expalignp.  Guarded by TF_ALIGNRESOLVED; RESOLVE_CLEAN
 * clears the cached flags to force recomputation.
 *
 * NOTE(review): elided extract — the case labels for the switch below
 * (TY_CLASS, TY_ARYOF, ...) are among the missing lines; the visible
 * assignments pick sg / subtype1 / subtype2 per type operator.
 */
7005 * resolveStorageType() - temporary space required to initialize type
7008 * Figure out the temporary space required to initialize a type's defaults.
7009 * Note that the space will be figured independantly for any SemGroup's.
7013 resolveTypeAlign(Type *type, urunesize_t *expalignp, int flags)
7015 SemGroup *sg = NULL;
7016 Type *subtype1 = NULL;
7017 Type *subtype2 = NULL;
7019 dassert(type->ty_Flags & TF_RESOLVED);
7020 if (flags & RESOLVE_CLEAN) {
7021 if ((type->ty_Flags & TF_ALIGNRESOLVED) == 0)
7023 type->ty_Flags &= ~(TF_ALIGNRESOLVED | TF_TMPRESOLVED);
7025 if (type->ty_Flags & TF_ALIGNRESOLVED) {
7026 if (*expalignp < type->ty_TmpAlignMask)
7027 *expalignp = type->ty_TmpAlignMask;
7030 type->ty_Flags |= TF_ALIGNRESOLVED;
7033 switch (type->ty_Op) {
7035 sg = type->ty_ClassType.et_SemGroup;
7038 subtype1 = type->ty_AryType.et_Type;
7041 sg = type->ty_CompType.et_SemGroup;
7044 subtype1 = type->ty_ProcType.et_ArgsType;
7045 subtype2 = type->ty_ProcType.et_RetType;
7048 sg = type->ty_ImportType.et_SemGroup;
7051 sg = type->ty_ArgsType.et_SemGroup;
7054 sg = type->ty_VarType.et_SemGroup;
7057 /* has nothing to do with initializing the pointer */
7058 /* subtype1 = type->ty_RawPtrType.et_Type; */
7061 /* has nothing to do with initializing the pointer */
7062 /* subtype1 = type->ty_RefType.et_Type; */
7067 * nothing to be done here.
7070 case TY_UNRESOLVED: /* should be no unresolved types at this
7073 dassert_type(type, 0);
/* Roll sub-type alignment (and any default-assignment exp) into ours. */
7077 resolveTypeAlign(subtype1, &subtype1->ty_TmpAlignMask, flags);
7078 if (subtype1->ty_AssExp) {
7079 resolveExpAlign(subtype1->ty_AssExp,
7080 &subtype1->ty_TmpAlignMask,
7083 if (type->ty_TmpAlignMask < subtype1->ty_TmpAlignMask)
7084 type->ty_TmpAlignMask = subtype1->ty_TmpAlignMask;
7087 resolveTypeAlign(subtype2, &subtype2->ty_TmpAlignMask, flags);
7088 if (subtype2->ty_AssExp) {
7089 resolveExpAlign(subtype2->ty_AssExp,
7090 &subtype2->ty_TmpAlignMask,
7093 if (type->ty_TmpAlignMask < subtype2->ty_TmpAlignMask)
7094 type->ty_TmpAlignMask = subtype2->ty_TmpAlignMask;
7096 if (type->ty_AssExp) {
7097 resolveExpAlign(type->ty_AssExp,
7098 &type->ty_TmpAlignMask,
7102 dassert(sg->sg_Flags & SGF_RESOLVED);
7103 /* ResolveSemGroup(sg, 0); */
7104 resolveSemGroupAlign(sg, flags);
7105 if (type->ty_TmpAlignMask < sg->sg_TmpAlignMask)
7106 type->ty_TmpAlignMask = sg->sg_TmpAlignMask;
7108 if (*expalignp < type->ty_TmpAlignMask)
7109 *expalignp = type->ty_TmpAlignMask;
/*
 * resolveStorageType() - compute the temporary storage (bytes) needed
 * to initialize a type's defaults, folding the result into *limitp via
 * SIZELIMIT after aligning 'base'.  Guarded by TF_TMPRESOLVED.
 *
 * NOTE(review): elided extract — switch case labels and several
 * intervening statements are missing; the visible structure parallels
 * resolveTypeAlign() above.  'isglob' appears to select the SemGroup's
 * global vs normal temporary accounting — confirm against full source.
 */
7114 resolveStorageType(Type *type, int isglob,
7115 urunesize_t base, urunesize_t *limitp)
7117 SemGroup *sg = NULL;
7118 Type *subtype1 = NULL;
7119 Type *subtype2 = NULL;
7121 dassert(type->ty_Flags & TF_ALIGNRESOLVED);
7122 if (type->ty_Flags & TF_TMPRESOLVED) {
7123 base = BASEALIGN(base, type->ty_TmpAlignMask);
7124 SIZELIMIT(base, type->ty_TmpBytes, limitp);
7127 type->ty_Flags |= TF_TMPRESOLVED;
7129 switch (type->ty_Op) {
7131 sg = type->ty_ClassType.et_SemGroup;
7134 subtype1 = type->ty_AryType.et_Type;
7137 sg = type->ty_CompType.et_SemGroup;
7140 subtype1 = type->ty_ProcType.et_ArgsType;
7141 subtype2 = type->ty_ProcType.et_RetType;
7144 sg = type->ty_ImportType.et_SemGroup;
7147 sg = type->ty_ArgsType.et_SemGroup;
7150 sg = type->ty_VarType.et_SemGroup;
7153 /* has nothing to do with initializing the pointer */
7154 /* subtype1 = type->ty_RawPtrType.et_Type; */
7157 /* has nothing to do with initializing the pointer */
7158 /* subtype1 = type->ty_RefType.et_Type; */
7163 * nothing to be done here.
7166 case TY_UNRESOLVED: /* should be no unresolved types at this
7169 dassert_type(type, 0);
/* Sub-types compute their own ty_TmpBytes from a base of 0. */
7173 resolveStorageType(subtype1, 0,
7174 0, &subtype1->ty_TmpBytes);
7175 if (subtype1->ty_AssExp) {
7176 /* XXX base is 0? */
7177 resolveStorageExp(subtype1->ty_AssExp,
7178 0, &subtype1->ty_TmpBytes);
7180 base = BASEALIGN(base, subtype1->ty_TmpAlignMask);
7181 SIZELIMIT(base, subtype1->ty_TmpBytes, limitp);
7183 if (type->ty_TmpAlignMask < subtype1->ty_TmpAlignMask)
7184 type->ty_TmpAlignMask = subtype1->ty_TmpAlignMask;
7188 resolveStorageType(subtype2, 0,
7189 0, &subtype2->ty_TmpBytes);
7190 if (subtype2->ty_AssExp) {
7191 /* XXX base is 0? */
7192 resolveStorageExp(subtype2->ty_AssExp,
7193 0, &subtype2->ty_TmpBytes);
7195 base = BASEALIGN(base, subtype2->ty_TmpAlignMask);
7196 SIZELIMIT(base, subtype2->ty_TmpBytes, limitp);
7198 if (type->ty_TmpAlignMask < subtype2->ty_TmpAlignMask)
7199 type->ty_TmpAlignMask = subtype2->ty_TmpAlignMask;
7202 if (type->ty_AssExp) {
7203 /* XXX base is 0? */
7204 resolveStorageExp(type->ty_AssExp, 0, &type->ty_TmpBytes);
7208 dassert(sg->sg_Flags & SGF_RESOLVED);
7209 resolveStorageSemGroup(sg, 0, NULL, 0, NULL);
/* Global vs normal accounting — presumably keyed off 'isglob'. */
7212 base = BASEALIGN(base, sg->sg_GlobalAlignMask);
7213 base = BASEALIGN(base, sg->sg_TmpAlignMask);
7214 SIZELIMIT(base, sg->sg_GlobalTmpBytes, limitp);
7216 base = BASEALIGN(base, sg->sg_TmpAlignMask);
7217 SIZELIMIT(base, sg->sg_TmpBytes, limitp);
7221 * Re-resolve the type flags. XXX mostly fixed once I handled
7222 * CBase/DBase/GBase in resolveSemGroup1().
7224 if (sg->sg_Flags & SGF_HASASS)
7225 type->ty_Flags |= TF_HASASS;
7227 type->ty_Flags |= TF_HASLVREF;
7228 if (sg->sg_Flags & SGF_VARARGS)
7229 type->ty_Flags |= TF_HASLVREF; /* XXX TF_VARARGS */
7231 type->ty_Flags |= TF_HASCONSTRUCT;
7233 type->ty_Flags |= TF_HASDESTRUCT;
/*
 * resolveSemGroupAlign() - accumulate per-declaration alignment into
 * the SemGroup's sg_TmpAlignMask / sg_AlignMask / sg_GlobalAlignMask.
 * Guarded by SGF_ALIGNRESOLVED; RESOLVE_CLEAN forces recomputation.
 *
 * NOTE(review): elided extract — lines are missing between embedded
 * original line numbers; constructor/destructor decls appear to force a
 * full ResolveSemGroup() on unresolved module/class groups first.
 */
7239 * This is used to resolve temporary storage requirements for SemGroup's
7240 * related to classes and compound types. Temporary storage requirements are
7241 * calculated on a SemGroup-by-SemGroup basis and not aggregated into any
7244 * In the final pass we also reverse the constructor and destructor lists
7245 * (sg_CBase and sg_DBase), and the pointer/lvalue list (SRBase). These
7246 * lists were originally constructed by prepending and are thus in the wrong
7251 resolveSemGroupAlign(SemGroup *sg, int flags)
7256 * NOTE: SGF_RESOLVED might not be set, indicating that we were able to
7257 * pick-out individual declarations in (global) SGs without having to
7258 * resolve the whole group. This allows unused declarations to be
7259 * omitted by the code generator.
7261 if (flags & RESOLVE_CLEAN) {
7262 if ((sg->sg_Flags & SGF_ALIGNRESOLVED) == 0)
7264 sg->sg_Flags &= ~(SGF_ALIGNRESOLVED | SGF_TMPRESOLVED);
7266 if (sg->sg_Flags & SGF_ALIGNRESOLVED)
7268 sg->sg_Flags |= SGF_ALIGNRESOLVED;
7271 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
7273 if ((d->d_ScopeFlags & (SCOPE_CONSTRUCTOR |
7274 SCOPE_DESTRUCTOR))) {
7275 if ((sg->sg_Flags & SGF_RESOLVED) == 0 &&
7276 (sg->sg_Type == SG_MODULE ||
7277 sg->sg_Type == SG_CLASS)) {
7278 ResolveSemGroup(sg, 0);
7282 if ((d->d_Flags & DF_RESOLVED) == 0)
7284 resolveDeclAlign(d, &sg->sg_TmpAlignMask, flags);
/* Globals widen the group's global mask; others the normal mask. */
7285 if (d->d_ScopeFlags & SCOPE_GLOBAL) {
7286 if (sg->sg_GlobalAlignMask < d->d_AlignMask)
7287 sg->sg_GlobalAlignMask = d->d_AlignMask;
7289 if (sg->sg_AlignMask < d->d_AlignMask)
7290 sg->sg_AlignMask = d->d_AlignMask;
/*
 * resolveStorageSemGroup() - resolve per-declaration temporary storage
 * for a SemGroup, then reverse the prepend-built sg_CBase / sg_DBase /
 * sg_GBase / sg_SRBase lists into declaration order and record the
 * final sg_TmpBytes / sg_GlobalTmpBytes.
 *
 * NOTE(review): elided extract — lines are missing between embedded
 * original line numbers.  Passing limitp == NULL substitutes local
 * dummy accumulators (both limitp and glimitp).
 */
7297 resolveStorageSemGroup(SemGroup *sg,
7298 urunesize_t base, urunesize_t *limitp,
7299 urunesize_t gbase, urunesize_t *glimitp)
7303 urunesize_t dummy_limit = 0;
7304 urunesize_t dummy_glimit = 0;
7306 if (limitp == NULL) {
7307 limitp = &dummy_limit;
7308 glimitp = &dummy_glimit;
7312 if ((sg->sg_Flags & SGF_RESOLVED) == 0) {
7313 ResolveSemGroup(sg, 0);
7316 dassert(sg->sg_Flags & SGF_ALIGNRESOLVED);
7317 if (sg->sg_Flags & SGF_TMPRESOLVED)
7319 sg->sg_Flags |= SGF_TMPRESOLVED;
7324 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
7325 if (d->d_Flags & DF_RESOLVED) {
7326 resolveDeclStorage(d, base, limitp, gbase, glimitp);
/* Reverse the four prepend-built singly-linked lists in place. */
7333 if ((d2 = sg->sg_CBase) != NULL) {
7334 sg->sg_CBase = NULL;
7335 while ((d = d2) != NULL) {
7337 d->d_CNext = sg->sg_CBase;
7341 if ((d2 = sg->sg_DBase) != NULL) {
7342 sg->sg_DBase = NULL;
7343 while ((d = d2) != NULL) {
7345 d->d_DNext = sg->sg_DBase;
7349 if ((d2 = sg->sg_GBase) != NULL) {
7350 sg->sg_GBase = NULL;
7351 while ((d = d2) != NULL) {
7353 d->d_GNext = sg->sg_GBase;
7357 if ((d2 = sg->sg_SRBase) != NULL) {
7358 sg->sg_SRBase = NULL;
7359 while ((d = d2) != NULL) {
7361 d->d_SRNext = sg->sg_SRBase;
7365 sg->sg_TmpBytes = *limitp;
7366 sg->sg_GlobalTmpBytes = *glimitp;
/*
 * methodCheckThisId() - validate that a method procedure's first
 * argument ('this') is a reference-to-class or pointer-to-class,
 * asserting (via dassert_exp) on anything else.
 *
 * NOTE(review): elided extract — the TY_REFTO/TY_PTRTO case labels and
 * some statements are missing between the embedded line numbers.
 */
7370 * Validate that the 'this' variable is a pointer-to-class or
7371 * reference-to-class. It can either be an lvalue or not so we
7372 * don't have to check that.
7374 * parse2.c may have auto-created the 'this' argument. If DF_AUTOTHIS
7375 * is set and the class is SCOPE_UNRESTRICTED, change the 'this' argument
7376 * from a reference to a pointer.
7380 methodCheckThisId(Type *type, Exp *exp)
7385 dassert(type->ty_Op == TY_PROC);
7386 dassert(type->ty_ProcType.et_ArgsType->ty_Op == TY_ARGS);
7387 sg = type->ty_ProcType.et_ArgsType->ty_CompType.et_SemGroup;
7388 d = RUNE_FIRST(&sg->sg_DeclList);
7389 dassert_exp(exp, d != NULL);
7390 dassert(d->d_Id == RUNEID_THIS);
7392 type = d->d_StorDecl.ed_Type;
7394 switch (type->ty_Op) {
7396 type = type->ty_RefType.et_Type;
7397 dassert_exp(exp, type->ty_Op == TY_CLASS);
7398 sg = type->ty_ClassType.et_SemGroup;
7399 dassert_exp(exp, sg->sg_Stmt->st_Op == ST_Class);
7401 if (sg->sg_Stmt->st_ClassStmt.es_Scope.s_Flags & SCOPE_UNRESTRICTED) {
7402 fprintf(stderr, "NOTE: resolver 7348 - unrestricted class\n");
7407 dassert_exp(exp, type->ty_RawPtrType.et_Type->ty_Op == TY_CLASS);
7410 fprintf(stderr, "Special method 'this' argument must be a "
7412 dassert_exp(exp, 0);
/*
 * SemGroup dependency tracking: a fixed 1024-bucket open hash keyed on
 * the (src, dst) SemGroup pointer pair.  resolveSGDependHash() returns
 * the bucket head for a given pair; SGCurrentDep is the SemGroup whose
 * dependencies are currently being recorded.
 */
7420 * Calculate SG dependencies
7422 #define SGDEP_HSIZE 1024
7423 #define SGDEP_HMASK (SGDEP_HSIZE - 1)
7425 static SemGroup * SGCurrentDep;
7426 static SGDepend * SGDepHash[SGDEP_HSIZE];
/* Hash both pointers (shifted to drop allocator alignment bits). */
7429 SGDepend * *resolveSGDependHash(SemGroup *src, SemGroup *dst) {
7432 hv = ((intptr_t) src >> 7) ^ ((intptr_t) dst >> 5);
7434 return (&SGDepHash[hv & SGDEP_HMASK]);
/*
 * resolvePushSGDepend() / resolvePopSGDepend() - record that the
 * current SemGroup (SGCurrentDep) depends on 'sg', allocating an
 * SGDepend node on first sight and linking it onto both the hash
 * bucket and the SemGroup's sg_DepFirst list, then make 'sg' current.
 *
 * NOTE(review): elided extract — both function bodies have missing
 * lines (resolvePopSGDepend's body is entirely absent here); the
 * push/pop pairing presumably maintains a dependency stack — confirm
 * against the full source.
 */
7439 resolvePushSGDepend(SemGroup *sg __unused)
7445 depp = resolveSGDependHash(SGCurrentDep, sg);
7446 for (dep = *depp; dep; dep = dep->hnext) {
7447 if (dep->src == SGCurrentDep && dep->dst == sg)
7451 dep = zalloc(sizeof(SGDepend));
7453 dep->src = SGCurrentDep;
7457 dep->next = SGCurrentDep->sg_DepFirst;
7458 SGCurrentDep->sg_DepFirst = dep;
7461 last = SGCurrentDep;
7468 resolvePopSGDepend(SemGroup *dep)
/*
 * resolveDynamicProcedure() - when a call resolves to a dynamic method
 * (TOK_STRIND through a TY_REFTO), flag all matching subclass decls so
 * they are resolved if their class is used at all (the dynamic call
 * could dispatch to any of them).
 *
 * NOTE(review): elided extract — lines are missing between embedded
 * original line numbers (e.g. where 'd' is derived from the class).
 */
7476 * If we are resolving to a dynamic method call we need to flag all matching
7477 * current subclass decls for (d) not yet resolved to ensure they get
7478 * resolved if their related class is used at all, since the dynamic method
7479 * call might be trying to call any of them.
7481 static void resolveDynamicDecl(Declaration *d);
7485 resolveDynamicProcedure(SemGroup * isg __unused, SemGroup * sg __unused,
7486 Exp * exp, int flags __unused)
7493 type = lhs->ex_Lhs->ex_Type;
/* Only indirect calls through a reference type are dynamic. */
7496 if (lhs->ex_Token != TOK_STRIND || type->ty_Op != TY_REFTO)
7498 type = type->ty_RefType.et_Type;
7499 dassert_exp(exp, type->ty_Op == TY_CLASS);
7501 resolveDynamicDecl(d);
/*
 * resolveDynamicProcedureAlign() - alignment counterpart of
 * resolveDynamicProcedure(): for a dynamic method call, widen
 * *expalignp by every matching subclass decl via
 * resolveDynamicDeclAlign().  (Elided extract; same TOK_STRIND /
 * TY_REFTO guard as its siblings.)
 */
7506 resolveDynamicProcedureAlign(Exp *exp, urunesize_t *expalignp, int flags)
7513 type = lhs->ex_Lhs->ex_Type;
7516 if (lhs->ex_Token != TOK_STRIND || type->ty_Op != TY_REFTO)
7518 type = type->ty_RefType.et_Type;
7519 dassert_exp(exp, type->ty_Op == TY_CLASS);
7521 resolveDynamicDeclAlign(d, expalignp, flags);
/*
 * resolveDynamicProcedureStorage() - storage counterpart of
 * resolveDynamicProcedure(): for a dynamic method call, fold every
 * matching subclass decl's storage needs into limitp/glimitp via
 * resolveDynamicDeclStorage().  (Elided extract; same TOK_STRIND /
 * TY_REFTO guard as its siblings.)
 */
7526 resolveDynamicProcedureStorage(Exp * exp,
7527 urunesize_t base, urunesize_t *limitp,
7528 urunesize_t gbase, urunesize_t *glimitp)
7535 type = lhs->ex_Lhs->ex_Type;
7538 if (lhs->ex_Token != TOK_STRIND || type->ty_Op != TY_REFTO)
7540 type = type->ty_RefType.et_Type;
7541 dassert_exp(exp, type->ty_Op == TY_CLASS);
7543 resolveDynamicDeclStorage(d, base, limitp, gbase, glimitp);
/*
 * resolveDynamicDecl() - mark every subclass override of 'd' as
 * DF_DYNAMICREF and resolve those whose SemGroup is already being (or
 * has been) resolved, then recurse into overrides that themselves have
 * sub-overrides.  Two passes over d_SubBase: flag/resolve, then recurse.
 */
7548 resolveDynamicDecl(Declaration *d)
7552 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
7553 scan->d_Flags |= DF_DYNAMICREF;
7554 if (scan->d_MyGroup &&
7555 (scan->d_MyGroup->sg_Flags & (SGF_RESOLVING |
7557 ResolveDecl(scan, 0);
7560 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
7561 if (scan->d_SubBase)
7562 resolveDynamicDecl(scan);
/*
 * Inline-expansion bookkeeping: xinline_t forms a doubly-linked stack
 * of procedures currently being inlined (used to detect/deny recursive
 * inlining).  XInlineTop is the fixed stack head; XInlineBot tracks the
 * innermost frame.  (Elided extract — struct members beyond prev/next
 * are missing from this view.)
 */
7567 * Handle everything required to inline a procedure. Small procedures are
7568 * automatically inlined unless 'noinline' is specified. 'inline' must be
7569 * specified to inline large procedures. We can only inline when we know the
7570 * exact procedure in question, so ref-based method calls tend to prevent
7573 typedef struct xinline {
7574 struct xinline *prev;
7575 struct xinline *next;
7579 xinline_t XInlineTop;
7580 xinline_t *XInlineBot = &XInlineTop;
/*
 * resolveProcedureInline() - inline a procedure call when eligible:
 * duplicates the procedure body (DupStmt), rewrites the call exp to
 * TOK_INLINE_CALL, resolves the duplicated statement, and links its
 * semantic context under the caller so stack storage is computed as
 * part of the caller's frame.
 *
 * Bail-out conditions (visible below): internal/clang/noinline/thread
 * scopes, probably-constant calls, dynamic ref-based method calls,
 * procedures with global storage, var-args procedures, recursive
 * inlining (DF_INLINING), constant calls, and non-DOP_PROC targets.
 *
 * NOTE(review): elided extract — lines are missing between embedded
 * original line numbers; do not edit logic from this view.
 */
7584 resolveProcedureInline(SemGroup * isg __unused, SemGroup * sg __unused,
7585 Exp * exp, int flags __unused)
7596 * Do not inline of internal, clang call, marked as noinline, or
7597 * threaded. Do not inline a function which will probably return a
7598 * constant (and be optimized into one directly, inlining will slower
7599 * things down in that situation).
7601 if (d->d_ScopeFlags & (SCOPE_INTERNAL | SCOPE_CLANG | SCOPE_NOINLINE))
7603 if (d->d_ScopeFlags & (SCOPE_THREAD))
7605 if (exp->ex_Flags & EXF_PROBCONST)
7609 * XXX optimize this if the reference type is known explicitly, otherwise
7610 * we can't inline since it requires a dynamic call.
7612 if (lhs->ex_Token == TOK_STRIND &&
7613 lhs->ex_Lhs->ex_Type->ty_Op == TY_REFTO)
7617 * For now do not try to combine global data because each inline will get
7618 * its own instantiation, which is not what the programmer expects.
7620 st = d->d_ProcDecl.ed_ProcBody;
7623 if (st->st_MyGroup->sg_GlobalBytes || st->st_MyGroup->sg_GlobalTmpBytes)
7627 * XXX we should be able to allow var-args inlines, why doesn't this
7630 if (d->d_ProcDecl.ed_Type->ty_ProcType.et_ArgsType->
7631 ty_CompType.et_SemGroup->sg_Flags & SGF_VARARGS)
7635 * Do not inline the same procedure recursively, or if we can optimize
7636 * the procedure call into a constant by interpreting it once.
7638 if (d->d_Flags & DF_INLINING)
7640 if (exp->ex_Flags & EXF_CONST)
7644 * Do not inline if we do not know the precise procedure at resolve-time.
7646 if (d->d_Op != DOP_PROC || lhs->ex_Type->ty_Op == TY_REFTO)
/* Push an xinline frame for recursion detection. */
7649 xin = zalloc(sizeof(*xin));
7650 xin->prev = XInlineBot;
7652 XInlineBot->next = xin;
7656 * We inline the procedure by duplicating the procedure body and changing
7657 * the procedure call ex. Disallow recursive inlining.
7659 * Set PARSE_TYPE on exLhs to retain exLhs->ex_Type across any further
7660 * duplication for the TOK_INLINE_CALL switch.
7662 d->d_Flags |= DF_INLINING;
7665 dassert((exp->ex_Flags & EXF_DUPEXP) == 0);
7666 exp->ex_Lhs->ex_Flags |= EXF_PARSE_TYPE;
7667 st = d->d_ProcDecl.ed_ProcBody;
/* Only inline bodies below the configured complexity threshold. */
7668 if (st->st_MyGroup->sg_Complexity < RuneInlineComplexity) {
7672 char buf[RUNE_IDTOSTR_LEN];
7675 printf("InlineTest: %5d", st->st_MyGroup->sg_Complexity);
7676 for (xscan = XInlineTop.next; xscan; xscan = xscan->next) {
7677 printf(".%s", runeid_text(xscan->d->d_Id, buf));
7681 altsg = st->st_MyGroup->sg_Parent;
7682 dassert(st->st_Flags & STF_SEMANTIC);
7683 st = DupStmt(st->st_MyGroup, NULL, st);
7684 st->st_ProcStmt.es_Decl = d;
7685 st->st_ProcStmt.es_Scope = d->d_Scope;
7686 st->st_Flags |= STF_INLINED_PROC;
7687 exp->ex_Token = TOK_INLINE_CALL;
7688 exp->ex_AuxStmt = st;
7691 * XXX sg_AltContext is actually what we want to have priority for
7692 * searches, not sg_Parent!
7694 ResolveStmt(d->d_ImportSemGroup, st, flags);
7695 st->st_MyGroup->sg_AltContext = altsg;
7696 st->st_MyGroup->sg_Flags |= SGF_ALTPRIORITY;
7699 * Link the inlined procedure's semantic context with our own so
7700 * stack storage is properly calculated. We must clear STF_SEMTOP
7701 * here or the alignment recursion will restart at 0.
7703 dassert(st->st_Flags & STF_SEMTOP);
7704 dassert(st->st_Flags & STF_SEMANTIC);
7705 st->st_Flags &= ~STF_SEMTOP;
7706 st->st_MyGroup->sg_Parent = sg;
7707 /* ResolveExp(isg, sg, exp, exp->ex_Type, flags); */
/* Pop the xinline frame and clear the recursion guard. */
7710 d->d_Flags &= ~DF_INLINING;
7711 XInlineBot->next = NULL;
7712 XInlineBot = xin->prev;
7713 zfree(xin, sizeof(*xin));
/*
 * SpecialSemGroupGet() - map a reserved identifier (the __va_* family)
 * to its SPECIAL_* code, presumably returning 0/none for ordinary ids.
 *
 * NOTE(review): elided extract — the switch header, several 's = ...'
 * assignments, the default case, and the return are among the missing
 * lines; confirm the mapping against the full source.
 */
7717 SpecialSemGroupGet(runeid_t id)
7725 case RUNEID_VA_COUNT:
7728 case RUNEID_VA_TYPE:
7731 case RUNEID_VA_DATA:
7734 case RUNEID_VA_VARCOUNT:
7735 s = SPECIAL_VAR_COUNT;
7737 case RUNEID_VA_VARTYPE:
7738 s = SPECIAL_VAR_TYPE;
7740 case RUNEID_VA_VARDATA:
7741 s = SPECIAL_VAR_DATA;
7743 case RUNEID_VA_TYPEID:
7746 case RUNEID_VA_TYPESTR:
7747 s = SPECIAL_TYPESTR;
7757 * Validate the user-supplied 'this' argument, or create an automatic
7758 * 'this' argument to a method procedure, or modify an automatic 'this'
7759 * argument as appropriate.
7761 * XXX User-supplied 'this' arguments are often declared incorrectly, for
7762 * example declared using thet super-class instead of using _t. Disallow
7765 * The 'this' argument to a method procedure can be:
7767 * lvalue class @this (used for new() and other special operators)
7768 * lvalue class *this (used for new() and other special operators)
7769 * class @this (default for normal class method)
7770 * class *this (default for unrestricte class method)
7772 * NOTE: References can be cast to pointers but pointers cannot be cast
7773 * back to references. Class embedding is only allowed for
7774 * SCOPE_UNRESTRICTED classes. Normal classes can only be
7775 * declared as references or pointed to (etc).
7777 * NOTE: This occurs prior to the SG being resolved, do not attempt to
7778 * resolve declaration types in here!
7781 ResolveMethodProcedureThisArg(SemGroup *sg, Declaration *d)
7790 dassert_decl(d, d->d_Op == DOP_PROC);
7792 type = d->d_ProcDecl.ed_Type;
7793 dassert(type->ty_Op == TY_PROC);
7795 if (type->ty_SQFlags & SF_METHOD) {
7797 * asg represents the procedure argument semgroup
7799 * ad represents the first argument. If it is not RUNEID_THIS,
7800 * create the 'this' argument.
7802 asg = type->ty_ProcType.et_ArgsType->ty_ArgsType.et_SemGroup;
7803 ad = RUNE_FIRST(&asg->sg_DeclList);
7804 if (ad == NULL || ad->d_Id != RUNEID_THIS) {
7806 * Create 'this' argument
7808 Scope tscope = INIT_SCOPE(SCOPE_ALL_VISIBLE);
7809 Type *ctype; /* class type */
7810 Stmt *st; /* class statement */
7812 dassert_decl(d, sg->sg_Type == SG_CLASS);
7815 ctype = AllocClassType(&sg->sg_ClassList,
7816 st->st_ClassStmt.es_Super,
7820 ad = AllocDeclaration(asg, DOP_ARGS_STORAGE, &tscope);
7821 if (st->st_ClassStmt.es_Scope.s_Flags & SCOPE_UNRESTRICTED) {
7822 ad->d_StorDecl.ed_Type = TypeToRawPtrType(ctype);
7824 ad->d_StorDecl.ed_Type = TypeToRefType(ctype);
7826 /* the auto 'this' argument is no longer an lvaluestor */
7827 /* ad->d_ScopeFlags |= SCOPE_LVALUE; */
7828 if (type->ty_SQFlags & SF_UNTRACKED)
7829 ad->d_ScopeFlags |= SCOPE_UNTRACKED;
7830 else if (type->ty_SQFlags & SF_UNLOCKED)
7831 ad->d_ScopeFlags |= SCOPE_UNLOCKED;
7832 else if (type->ty_SQFlags & SF_SOFT)
7833 ad->d_ScopeFlags |= SCOPE_SOFT;
7834 else if (type->ty_SQFlags & SF_HARD)
7835 ad->d_ScopeFlags |= SCOPE_HARD;
7837 ad->d_ScopeFlags |= SCOPE_HARD;
7839 ad->d_Flags |= DF_AUTOTHIS;
7840 HashDecl(ad, RUNEID_THIS);
7843 * Place at front of list
7845 RUNE_REMOVE(&asg->sg_DeclList, ad, d_Node);
7846 RUNE_INSERT_HEAD(&asg->sg_DeclList, ad, d_Node);
7850 * Finish validating and setting up the 'this' argument.
7852 dassert_decl(ad, ad->d_Id == RUNEID_THIS &&
7853 ad->d_Op == DOP_ARGS_STORAGE);
7854 dassert_decl(ad, sg->sg_Stmt->st_Op == ST_Class);
7857 thisType = ad->d_StorDecl.ed_Type;
7858 /*ResolveType(thisType, NULL, 0);*/
7862 if (thisType->ty_Op == TY_CLASS) {
7863 /* XXX sg_ClassList? right sg? */
7864 /* XXX correct visibility? */
7865 if (ad->d_Search == NULL)
7866 ad->d_Search = thisType->ty_ClassType.et_SemGroup;
7867 ad->d_StorDecl.ed_Type =
7868 AllocClassType(&sg->sg_ClassList,
7869 sg->sg_Stmt->st_ClassStmt.es_Super,
7870 sg->sg_Stmt->st_MyGroup,
7872 /*ResolveType(ad->d_StorDecl.ed_Type, NULL, 0);*/
7877 * XXX CANT RESOLVE THIS NOW, THE TYPES MIGHT NOT BE KNOWN
7878 * WELL ENOUGH YET AND WE CANNOT RESOLVE THEM AT THIS TIME.
7880 if (thisType->ty_Op == TY_REFTO &&
7881 (thisType->ty_RefType.et_Type->ty_Op == TY_CLASS ||
7882 thisType->ty_RefType.et_Type->ty_Op == TY_UNRESOLVED))
7884 int save_sqflags = thisType->ty_SQFlags;
7886 /* XXX sg_ClassList? right sg? */
7887 /* XXX correct visibility? */
7888 thisType = thisType->ty_RefType.et_Type;
7889 if (ad->d_Search == NULL)
7890 ad->d_Search = thisType->ty_ClassType.et_SemGroup;
7891 ad->d_StorDecl.ed_Type =
7892 AllocClassType(&sg->sg_ClassList,
7893 sg->sg_Stmt->st_ClassStmt.es_Super,
7894 sg->sg_Stmt->st_MyGroup,
7896 ad->d_StorDecl.ed_Type =
7897 TypeToRefType(ad->d_StorDecl.ed_Type);
7898 ad->d_StorDecl.ed_Type =
7899 TypeToQualType(ad->d_StorDecl.ed_Type, NULL,
7900 save_sqflags, NULL);
7901 /*ResolveType(ad->d_StorDecl.ed_Type, NULL, 0);*/
7902 } else if (thisType->ty_Op == TY_PTRTO &&
7903 (thisType->ty_RefType.et_Type->ty_Op == TY_CLASS ||
7904 thisType->ty_RefType.et_Type->ty_Op == TY_UNRESOLVED))
7906 int save_sqflags = thisType->ty_SQFlags;
7908 /* XXX sg_ClassList? right sg? */
7909 /* XXX correct visibility? */
7910 thisType = thisType->ty_RawPtrType.et_Type;
7911 if (ad->d_Search == NULL)
7912 ad->d_Search = thisType->ty_ClassType.et_SemGroup;
7913 ad->d_StorDecl.ed_Type =
7914 AllocClassType(&sg->sg_ClassList,
7915 sg->sg_Stmt->st_ClassStmt.es_Super,
7916 sg->sg_Stmt->st_MyGroup,
7918 ad->d_StorDecl.ed_Type =
7919 TypeToRawPtrType(ad->d_StorDecl.ed_Type);
7920 ad->d_StorDecl.ed_Type =
7921 TypeToQualType(ad->d_StorDecl.ed_Type, NULL,
7922 save_sqflags, NULL);
7923 /*ResolveType(ad->d_StorDecl.ed_Type, NULL, 0);*/
7925 fprintf(stderr, "'this' argument is not a @ref or *ptr\n");
7929 } else if (type->ty_SQFlags & SF_GMETHOD) {
7930 asg = type->ty_ProcType.et_ArgsType->ty_ArgsType.et_SemGroup;
7931 ad = RUNE_FIRST(&asg->sg_DeclList); // first arg
7933 if (ad == NULL || ad->d_Id != RUNEID_THIS) {
7934 Scope tscope = INIT_SCOPE(SCOPE_ALL_VISIBLE);
7938 dassert_decl(d, sg->sg_Type == SG_CLASS);
7941 ctype = AllocClassType(&sg->sg_ClassList,
7942 st->st_ClassStmt.es_Super,
7945 ad = AllocDeclaration(asg, DOP_TYPEDEF, &tscope);
7946 ad->d_TypedefDecl.ed_Type = ctype;
7947 ad->d_Flags |= DF_AUTOTHIS;
7948 HashDecl(ad, RUNEID_THIS);
7951 * Place at front of list
7957 * XXX CANT RESOLVE THIS NOW, THE TYPES MIGHT NOT BE KNOWN
7958 * WELL ENOUGH YET AND WE CANNOT RESOLVE THEM AT THIS TIME.
7960 thisType = ad->d_TypedefDecl.ed_Type;
7961 /*ResolveType(thisType, NULL, 0);*/
7963 dassert_decl(ad, ad->d_Id == RUNEID_THIS &&
7964 ad->d_Op == DOP_TYPEDEF);
7965 dassert_decl(ad, sg->sg_Stmt->st_Op == ST_Class);
7966 dassert_decl(ad, thisType->ty_Op == TY_CLASS);
7967 /* XXX sg_ClassList? right sg? */
7968 /* XXX correct visibility? */
7969 if (ad->d_Search == NULL)
7970 ad->d_Search = thisType->ty_ClassType.et_SemGroup;
7971 ad->d_TypedefDecl.ed_Type =
7972 AllocClassType(&sg->sg_ClassList,
7973 sg->sg_Stmt->st_ClassStmt.es_Super,
7974 sg->sg_Stmt->st_MyGroup,
7976 /*ResolveType(ad->d_StorDecl.ed_Type, NULL, 0);*/