2 * RESOLVE.C - Resolve the parser tree and prepare for code generation or
5 * (c)Copyright 1993-2016, Matthew Dillon, All Rights Reserved. See the
6 * COPYRIGHT file at the base of the distribution.
8 * Pass1 - ResolveClasses() - Handles superclass/subclass merging for the
11 * Pass2 - Resolve*() - Resolves identifiers and identifier paths, plus
12 * the size and alignment for Types, Decls, and SemGroups.
14 * Utilizes a deferred-work mechanism to avoid circular loops. This mechanism
15 * allows types to be partially resolved (enough to satisfy the caller), then
16 * finishes up via the deferred work queue.
/*
 * Forward declarations for the resolver passes (pass 1: class merge,
 * pass 2: identifier/type resolution, then alignment and storage).
 */
24 static void ResolveClasses(Stmt *st, int flags);
25 static void ResolveAlignment(Stmt *st, int flags);
26 static void ResolveStorage(Stmt *st, int flags);
27 static void ResolveSemGroup(SemGroup *sg, int retry);
28 static void errorDottedId(runeid_t *ary, const char *ctl,...);
/* Core pass-2 resolution: statements, types, decls, expressions */
30 static void ResolveStmt(SemGroup *isg, Stmt *st, int flags);
31 static Type *ResolveType(Type *type, struct ResVis *vis, int retry);
32 static void ResolveDecl(Declaration *d, int retry);
33 static Exp *ResolveExp(SemGroup *isg, SemGroup *sg,
34 Exp *exp, Type *itype, int flags);
/* Expression-resolution helpers */
36 static Type *resolveReturnType(SemGroup *sg, int flags);
37 static Type *resolveArgsType(SemGroup *sg, int flags);
38 static Exp *resolveConstExp(SemGroup *isg, SemGroup *sg, Exp *exp, int flags);
39 static Exp *resolveConstExpBool(SemGroup *isg, SemGroup *sg, Exp *exp,
40 int flags, TmpData *ts);
41 static Exp *resolveCompoundExp(SemGroup *isg, SemGroup *sg,
42 Exp *exp, Type *itype, int flags);
43 static Exp *resolveBracketedExp(SemGroup *isg, SemGroup *sg,
44 Exp *exp, Type *itype, int flags);
45 static Exp *resolveExpCast(SemGroup *isg, SemGroup *sg,
46 Exp *exp, Type *ltype, int flags);
47 static Exp *resolveExpOper(SemGroup *isg, SemGroup *sg,
48 Exp *exp, Type *itype, int flags);
49 static void resolveUnresClass(Type *super);
/* Alignment pass */
51 static void resolveDeclAlign(Declaration *d, urunesize_t *expalignp, int flags);
52 static void resolveExpAlign(Exp *exp, urunesize_t *expalignp, int flags);
53 static void resolveTypeAlign(Type *type, urunesize_t *expalignp, int flags);
54 static void resolveSemGroupAlign(SemGroup *sg, int flags);
/* Storage (size/offset) pass; 'g' variants track global storage */
56 static void resolveDeclStorage(Declaration *d, int flags,
57 urunesize_t base, urunesize_t *limitp,
58 urunesize_t gbase, urunesize_t *glimitp);
59 static void resolveExpOnlyStorage(Exp *exp, int flags,
60 urunesize_t base, urunesize_t *limitp);
61 static void resolveExpSubStorage(Exp *exp, int flags,
62 urunesize_t base, urunesize_t *limitp);
63 static void resolveExpStorage(Exp *exp, int flags,
64 urunesize_t base, urunesize_t *limitp);
/* Operator and cast lookup */
66 static Declaration *findOper(Type *btype, runeid_t id,
67 Type *ltype, Type *rtype, int flags);
68 static Declaration *findExpOper(Exp *exp, int flags);
69 static Declaration *findCast(Type *btype, Type *ltype, Type *rtype, int flags);
70 static void resolveTypeStorage(Type *type, int flags,
71 urunesize_t base, urunesize_t *limitp);
72 static void resolveSemGroupStorage(SemGroup *sg, int flags,
73 urunesize_t base, urunesize_t *limitp,
74 urunesize_t gbase, urunesize_t *glimitp);
/* Procedure inlining and dynamic-dispatch helpers */
76 static void resolveProcedureInline(SemGroup *isg, SemGroup *sg,
78 static void resolveDynamicProcedure(SemGroup *isg, SemGroup *sg,
80 static void resolveDynamicProcedureAlign(Exp *exp,
81 urunesize_t *expalignp, int flags);
82 static void resolveDynamicProcedureStorage(Exp *exp, int flags,
83 urunesize_t base, urunesize_t *limitp,
84 urunesize_t gbase, urunesize_t *glimitp);
/* Misc helpers */
86 static int SpecialSemGroupGet(runeid_t id);
87 static void ResolveMethodProcedureThisArg(SemGroup *subsg, Declaration *d);
88 static void checkUnrestrictedType(Declaration *d, Type *type);
91 * Adjust type to be lvalue but do not modify its relative context for
/*
 * Re-resolve a type after adding/removing the SF_LVALUE (and SF_CONST)
 * storage qualifiers.
 */
94 #define ADD_LVALUE(type) \
95 ResolveType(AddTypeQual((type), SF_LVALUE), NULL, 0)
96 #define DEL_LVALUE(type) \
97 ResolveType(DelTypeQual((type), SF_LVALUE), NULL, 0)
98 #define DEL_LVALUE_CONST(type) \
99 ResolveType(DelTypeQual((type), SF_LVALUE | SF_CONST), NULL, 0)
/*
 * Flag bits passed down through the Resolve*() family.
 */
101 #define RESOLVE_AUTOCAST 0x0001 /* autocast to expected type */
102 #define RESOLVE_UNUSED0002 0x0002
103 #define RESOLVE_CLEAN 0x0004 /* cleanup after const interp */
104 #define RESOLVE_FAILOK 0x0008 /* resolve failure is tolerated (prior comment was copy-pasted from RESOLVE_CLEAN) */
105 #define RESOLVE_ISGLOB 0x0010 /* NOTE(review): global-context resolve -- confirm */
106 #define RESOLVE_FINALIZE 0x0020 /* second (finalize) pass; see ResolveProject() */
/*
 * BASEALIGN - round 'base' up to the alignment implied by 'alignmask'
 * (a power-of-two minus one).  Every macro argument use is
 * parenthesized so expansions such as BASEALIGN(x, a | b) evaluate
 * correctly (the old form expanded '(base) + alignmask' unwrapped).
 */
#define BASEALIGN(base, alignmask) \
	(((base) + (alignmask)) & ~(urunesize_t)(alignmask))

/*
 * SIZELIMIT - extend *limitp to cover the range [0, base+bytes).
 * Wrapped in do/while(0) so the macro acts as a single statement and
 * is safe in unbraced if/else bodies (the old bare-if form had a
 * dangling-else hazard).
 */
#define SIZELIMIT(base, bytes, limitp)				\
	do {							\
		if ((base) + (bytes) > *(limitp))		\
			*(limitp) = ((base) + (bytes));		\
	} while (0)

/* Resolve a type with no visibility feedback and no deferred retry */
#define ResolveTypeSimple(type)	ResolveType((type), NULL, 0)
118 * Deferred work queue
/*
 * Deferred-work bookkeeping.  Work items form a singly linked FIFO
 * addressed by ResDeferBase with ResDeferTail pointing at the last
 * 'next' pointer (tail-pointer-to-pointer idiom).
 */
120 typedef Type * type_p;
123 typedef struct ResVis {
128 typedef struct ResDefer {
129 struct ResDefer *next;
/* Work-item discriminator for resdelay_t's union */
131 RES_STMT, RES_DECL, RES_TYPE, RES_EXP, RES_SEMGROUP
132 static resdelay_t *ResDeferBase;
133 static resdelay_t **ResDeferTail = &ResDeferBase;
/* NOTE(review): inline-complexity threshold knob -- confirm consumers */
163 int RuneInlineComplexity = 20;
166 * Do a pass on all deferred work. Returns non-zero if there is more
167 * deferred work after the pass is complete.
/*
 * runDeferredWork() - execute one pass over the deferred-work queue,
 * retrying each queued resolution.  Returns non-zero if more deferred
 * work remains after the pass.
 */
171 runDeferredWork(void)
/* Remember the current tail so we know where this pass ends */
174 resdelay_t **last = ResDeferTail;
178 while ((res = ResDeferBase) != NULL) {
/* Pop the head; reset the tail pointer when the queue drains */
179 if ((ResDeferBase = res->next) == NULL)
180 ResDeferTail = &ResDeferBase;
181 switch (res->which) {
183 ResolveStmt(res->stmt.isg, res->stmt.st, res->stmt.flags);
/* retry=1: second-chance resolution of the deferred item */
186 ResolveDecl(res->decl.d, 1);
189 type = ResolveType(res->type.type, NULL, 1);
190 dassert(type == res->type.type);
193 exp = ResolveExp(res->exp.isg, res->exp.sg,
194 res->exp.exp, res->exp.itype,
196 dassert(exp == res->exp.exp);
199 ResolveSemGroup(res->sg.sg, 1);
205 zfree(res, sizeof(*res));
206 if (&res->next == last) /* storage freed, ok to test ptr */
209 return (ResDeferBase != NULL);
/* Queue a statement for deferred (retry) resolution */
215 deferStmt(SemGroup *isg, Stmt *st, int flags)
219 res = zalloc(sizeof(*res));
220 res->which = RES_STMT;
223 res->stmt.flags = flags;
/* append at the FIFO tail */
225 ResDeferTail = &res->next;
/* Queue a declaration for deferred (retry) resolution */
231 deferDecl(Declaration *d)
235 res = zalloc(sizeof(*res));
236 res->which = RES_DECL;
/* append at the FIFO tail */
239 ResDeferTail = &res->next;
/* Queue an expression (with its expected type and flags) for retry */
245 deferExp(SemGroup *isg, SemGroup *sg, Exp *exp, Type *itype, int flags)
249 res = zalloc(sizeof(*res));
250 res->which = RES_EXP;
254 res->exp.itype = itype;
255 res->exp.flags = flags;
/* append at the FIFO tail */
257 ResDeferTail = &res->next;
261 * Note that visibility is set immediately by the call chain, NOT in any
/* Queue a type for deferred (retry) resolution */
266 deferType(Type *type)
270 res = zalloc(sizeof(*res));
271 res->which = RES_TYPE;
272 res->type.type = type;
/* append at the FIFO tail */
274 ResDeferTail = &res->next;
/* Queue a semantic group for deferred (retry) resolution */
280 deferSG(SemGroup *sg)
284 res = zalloc(sizeof(*res));
285 res->which = RES_SEMGROUP;
/* append at the FIFO tail */
288 ResDeferTail = &res->next;
/*
 * ResolveProject() - top-level driver: resolve classes and base types,
 * resolve everything reachable from main(), drain the deferred-work
 * queue, then run the alignment and storage passes (each twice: once
 * normal, once with RESOLVE_FINALIZE).
 */
292 ResolveProject(Parse *p, Stmt *st)
300 dassert_stmt(st, st->st_Op == ST_Import);
303 * Interpreter or Generator may reference our global internal types
304 * directly, so make sure they are all resolved.
306 ResolveClasses(st, 0);
307 for (i = 0; BaseTypeAry[i]; ++i)
308 ResolveType(BaseTypeAry[i], NULL, 0);
/* Locate main() in the top-level module; fatal if missing */
311 main_st = RUNE_FIRST(&st->st_List);
312 d = FindDeclId(main_st->st_MyGroup, id, &eno);
315 fprintf(stderr, "Top-level module missing main()\n");
318 dasserts_stmt(main_st, d->d_Op == DOP_PROC, "main() is not a procedure!\n");
321 * Resolve all dependencies on main (basically everything used by the
322 * project is resolved).
326 main_st = d->d_ProcDecl.ed_ProcBody;
/* Drain the deferred-work queue to a fixed point */
332 while (runDeferredWork())
336 * Resolve all types registered by DLLs
341 RUNE_FOREACH(tr, &TypeRegList, tr_Node)
342 ResolveType(tr->tr_Type, NULL, 0);
346 * This runs through and resolves the alignment and storage for
347 * everything that has been primarily resolved above.
349 ResolveAlignment(st, 0);
350 ResolveAlignment(st, RESOLVE_FINALIZE);
351 resolveDeclAlign(d, &d->d_MyGroup->sg_TmpAlignMask, 0);
352 resolveDeclAlign(d, &d->d_MyGroup->sg_TmpAlignMask, RESOLVE_FINALIZE);
354 ResolveStorage(st, 0);
355 ResolveStorage(st, RESOLVE_FINALIZE);
/* Temporary-storage bases for main's import semantic group */
364 sg = d->d_ImportSemGroup;
365 base = sg->sg_TmpBytes;
366 gbase = sg->sg_GlobalTmpBytes;
/* Alignment + storage for all built-in base types */
370 for (i = 0; BaseTypeAry[i]; ++i) {
371 Type *type = BaseTypeAry[i];
372 urunesize_t expalign = 0;
374 resolveTypeAlign(type, &expalign, 0);
375 resolveTypeAlign(type, &expalign, RESOLVE_FINALIZE);
376 resolveTypeStorage(type, 0, 0, &glimit);
377 resolveTypeStorage(type, RESOLVE_FINALIZE, 0, &glimit);
/* Class base types also need their SemGroup storage/alignment done */
379 if (type->ty_Op == TY_CLASS) {
380 resolveSemGroupStorage(type->ty_ClassType.et_SemGroup, 0, 0, NULL, 0, NULL);
381 resolveSemGroupStorage(type->ty_ClassType.et_SemGroup, RESOLVE_FINALIZE, 0, NULL, 0, NULL);
382 resolveSemGroupAlign(type->ty_ClassType.et_SemGroup, 0);
383 resolveSemGroupAlign(type->ty_ClassType.et_SemGroup, RESOLVE_FINALIZE);
388 resolveDeclStorage(d, 0, base, &limit, gbase, &glimit);
389 resolveDeclStorage(d, RESOLVE_FINALIZE, base, &limit, gbase, &glimit);
/* Mark the parse tree as fully resolved */
392 p->p_Format = PFMT_RESOLVED;
398 * ResolveClasses() - Resolve superclasses and do class merge
400 * This code implements the most complex feature of the language: subclassing
403 * The hardest thing we have to do is 'dup' declarations and code in order to
404 * implement subclassing and refinement. For example, a procedure defined in
405 * Integer must be dup'd for each subclass of Integer. We have to do this
406 * because storage requirements will change due to both subclassing and
407 * refinement. Even auto variables may wind up with different types between
408 * superclass and subclass.
410 * We must scan ST_Import and ST_Class statements.
414 ResolveClasses(Stmt *st, int flags)
416 SemGroup *sg = st->st_MyGroup;
419 * Resolver interlock. Assert that we aren't looping. A loop can occur
420 * if class A embeds class B and class B embeds class A (verses a pointer
423 dassert_stmt(st, (st->st_Flags & STF_RESOLVING) == 0);
424 if (st->st_Flags & STF_RESOLVED)
426 st->st_Flags |= STF_RESOLVING;
429 * If this is a subclass, integrate the superclass into it
431 if (st->st_Op == ST_Class && st->st_ClassStmt.es_Super) {
432 Type *super = st->st_ClassStmt.es_Super;
440 * Locate the superclass. 'super' does not appear in any other
441 * list.. this is a unique Type structure.
443 dassert_stmt(st, super->ty_Op == TY_UNRESOLVED);
445 resolveUnresClass(super);
446 } while (super->ty_Op == TY_UNRESOLVED);
448 dassert_stmt(st, super->ty_Op == TY_CLASS);
451 * Cleanup (XXX free qualified segments??)
453 st->st_ClassStmt.es_Super = super;
454 st->st_ClassStmt.es_Decl->d_ClassDecl.ed_Super = super;
457 * Inherit internal unsigned integer and floating point flags and a
460 sg->sg_Flags |= super->ty_ClassType.et_SemGroup->sg_Flags &
461 (SGF_ISINTEGER | SGF_ISUNSIGNED |
462 SGF_ISFLOATING | SGF_ISBOOL |
463 SGF_HASASS | SGF_GHASASS |
464 SGF_HASLVREF | SGF_GHASLVREF |
465 SGF_HASPTR | SGF_GHASPTR |
469 * The subclass's unrestricted scope (or not), must match the
470 * super-class. Otherwise methods pulled-down from the superclass
471 * might not be compatible with the subclass.
473 if ((sg->sg_ClassType->ty_SQFlags ^
474 super->ty_ClassType.et_SemGroup->sg_ClassType->ty_SQFlags) &
477 dwarn_stmt(sg->sg_Stmt, 0, "subclass %08x",
478 sg->sg_ClassType->ty_SQFlags);
479 dfatal_stmt(super->ty_ClassType.et_SemGroup->sg_Stmt,
480 TOK_ERR_CLASS_STRUCT_COMPAT,
483 super->ty_ClassType.et_SemGroup->
484 sg_ClassType->ty_SQFlags);
488 * Locate the class statement associated with the superclass and
491 sst = super->ty_ClassType.et_SemGroup->sg_Stmt;
492 dassert(sst->st_MyGroup == super->ty_ClassType.et_SemGroup);
493 dassert_stmt(st, sst != NULL);
494 dassert_stmt(st, sst->st_Op == ST_Class);
496 ResolveClasses(sst, flags);
499 * Assign the sg_Level for the subclass. This is used for semantic
500 * searches when a subclass is passed to a procedure expecting the
503 sg->sg_Level = sst->st_MyGroup->sg_Level + 1;
506 * XXX Subclasses can inherit locking scope here. Currently we do
510 if (sst->u.ClassStmt.es_Decl->d_ScopeFlags & SCOPE_HARD) {
511 st->u.ClassStmt.es_Decl->d_ScopeFlags |= SCOPE_HARD;
512 } else if (st->u.ClassStmt.es_Decl->d_ScopeFlags & SCOPE_HARD) {
513 dfatal_stmt(st, TOK_ERR_ILLEGAL_LOCKING_REFINEMENT, NULL);
518 * First move all the declarations from sg to tsg so we can merge the
519 * superclass properly (keep all the d_Index's correct). Note that
520 * tsg is not 100% integrated so we can only use it for search
521 * purposes. We absolutely CANNOT DupDeclaration() into tsg!
523 tsg = AllocSemGroup(SG_CLASS, sg->sg_Parse, NULL, sg->sg_Stmt);
525 while ((d = RUNE_FIRST(&sg->sg_DeclList)) != NULL) {
526 if (d->d_Id == RUNEID__T)
532 * If our sub-class does not have a _t type, then automatically
535 * Add to sg then rename so the declaration is properly initialized
536 * for sg (e.g. fields like d_Level).
539 Scope scope = INIT_SCOPE(SCOPE_REFINE);
541 d = AllocDeclaration(sg, DOP_TYPEDEF, &scope);
542 d->d_TypedefDecl.ed_Type = sg->sg_ClassType;
544 AllocClassType(&sg->sg_ClassList, super,
545 sg->sg_Stmt->st_MyGroup, SCOPE_ALL_VISIBLE);
547 HashDecl(d, RUNEID__T);
552 * Reset count (index counter)
554 sg->sg_DeclCount = 0;
557 * Merge the superclass into this class, in sequence. Iterate through
558 * declarations in the superclass and pull them into the subclass.
559 * Figure out compatibility between super and subclasses.
561 * d - iterates the superclass nd - subclass declaration refining
562 * the superclass decl
564 RUNE_FOREACH(d, &sst->st_MyGroup->sg_DeclList, d_Node) {
568 dassert(d->d_Level != NULL && d->d_Level->sg_Level < sg->sg_Level);
571 * See if the superclass decl conflicts with a subclass decl. If
572 * there is no conflict pull it into the subclass and adjust the
573 * visibility. Note that the superclass may have duplicate ids,
574 * but they will be at different levels if so.
576 * The super linkage is required when findDecl() checks
577 * visibility of a declaration hidden relative to our subclass,
578 * but not necessarily hidden relative to the superclass.
580 * Set d_Search to the (ultimate) superclass when inheriting
581 * elements from the superclass. d_Search is not set for
582 * refinements or extensions.
586 rd = FindDeclRefineId(tsg, d->d_Id, &eno);
588 /* XXX proliferates decls/types? */
589 nd = DupDeclaration(sg, d);
590 dassert(d->d_Index == nd->d_Index);
591 nd->d_ScopeFlags &= ~SCOPE_ALL_VISIBLE | super->ty_Visibility;
592 nd->d_ScopeFlags &= ~SCOPE_REFINE;
593 if (nd->d_Search == NULL)
594 nd->d_Search = sst->st_MyGroup;
597 * Superclass decl is brought in unrefined (though it might
598 * be an implied refinement depending on side-effects).
600 nd->d_SubNext = d->d_SubBase;
607 * If there is a conflict and we are not refining the superclass
608 * entity, then pull in the superclass entity and make it
609 * invisible to sg_Level searches. This could bring in multiple
610 * levels of the same id.
612 * Note that this may result in multiple ids, but they will be at
613 * different levels. In this case rd will be at the current
614 * level and nd will be at some prior level.
616 * Order is important here.
618 if ((rd->d_ScopeFlags & SCOPE_REFINE) == 0) {
619 /* XXX proliferates decls/types? */
620 nd = DupDeclaration(sg, d);
621 dassert(d->d_Index == nd->d_Index);
622 nd->d_ScopeFlags &= ~(SCOPE_ALL_VISIBLE | SCOPE_REFINE);
623 if (nd->d_Search == NULL)
624 nd->d_Search = sst->st_MyGroup;
627 * Superclass decl is brought in unrefined (though it might
628 * be an implied refinement depending on side-effects).
630 nd->d_SubNext = d->d_SubBase;
637 * Ok, we need to refine. But the superclass may contain
638 * multiple levels of the same id. We only refine the one that
639 * is visible to us. None of these other declarations will be at
642 if ((d->d_ScopeFlags & SCOPE_ALL_VISIBLE) == 0) {
643 nd = DupDeclaration(sg, d);
644 dassert(d->d_Index == nd->d_Index);
645 nd->d_ScopeFlags &= ~(SCOPE_ALL_VISIBLE |
647 if (nd->d_Search == NULL)
648 nd->d_Search = sst->st_MyGroup;
651 * Superclass decl is brought in unrefined (though it might
652 * be an implied refinement depending on side-effects).
654 nd->d_SubNext = d->d_SubBase;
661 * Whew! Finally, we found the superclass decl that we wish to
662 * refine. We had better not have already refined it or there's
663 * something wrong with the algorithm.
665 * Since we inherit the superclass method's level our method will
666 * run in the superclass instead of the original, but d_Super
667 * still must be set for findDecl() to track down visibility
668 * relative to the superclass methods.
670 * Do not set d_Search for refinement overrides, the context
671 * for method lookups should be the subclass, not the superclass
675 dassert_decl(rd, rd->d_Super == NULL);
676 dassert(d->d_Index == rd->d_Index);
677 rd->d_Level = d->d_Level; /* XXX */
681 * super->subclass(es) list
683 rd->d_SubNext = d->d_SubBase;
687 * This is for the super.field special case method access below.
689 * XXX This brings in lots of extra procedures that we really
690 * should pare down, but we don't know which ones will
691 * be accessed in pass1.
693 if (d->d_Op == DOP_PROC) {
694 d->d_Flags |= DF_SUPERCOPY;
698 * Inherit scope from the superclass if it is not specified in
699 * the REFINE declaration (see AllocDeclaration).
701 if ((rd->d_ScopeFlags & SCOPE_ALL_VISIBLE) == 0) {
702 rd->d_ScopeFlags |= d->d_ScopeFlags & SCOPE_ALL_VISIBLE;
707 * Deal with any remaining elements in tsg. These are 'extensions'
708 * to the superclass. There may also be invisible DOP_PROC's to
709 * handle the special superclass method call case described above.
711 while ((rd = RUNE_FIRST(&tsg->sg_DeclList)) != NULL) {
712 if (rd->d_ScopeFlags & SCOPE_REFINE) {
713 if (rd->d_Super == NULL) {
714 char buf[RUNE_IDTOSTR_LEN];
715 fprintf(stderr, "Unable to refine %s, it does not exist "
717 runeid_text(rd->d_Id, buf));
727 * Pull in any methods from the superclass that the subclass
728 * explicitly accesses via super.func(). As before, we have
729 * to set d_Search for the visibility context when resolving
730 * these procedures (XXX).
732 * We have to special case super.method() for a refined method.
733 * Normally this makes the original method inaccessible (for
734 * storage), but we want it to work for a procedure so we make a copy
735 * in tsg. (we can't dup it directly into sg because it will screw
738 * We must not only clear the scope visibility and the temporary
739 * refine flag, we also have to clear constructor/destructor scope in
740 * the copy so only the refined constructor/destructor is called, not
741 * both the refined and the superclass constructor/destructor.
743 * Also fixup arguments for method procedures. We will set
744 * d_Search for the 'this' argument.
746 RUNE_FOREACH(d, &sst->st_MyGroup->sg_DeclList, d_Node) {
749 if (d->d_Flags & DF_SUPERCOPY) {
750 d->d_Flags &= ~DF_SUPERCOPY;
751 nd = DupDeclaration(sg, d);
752 nd->d_ScopeFlags &= ~(SCOPE_ALL_VISIBLE |
756 if (nd->d_Search == NULL)
757 nd->d_Search = sst->st_MyGroup;
760 } else if (st->st_Op == ST_Class) {
767 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
768 if (d->d_Id == RUNEID__T) {
775 * If our class does not have a _t type, then automatically
776 * add it in. This is not a sub-class so do not scope it
780 Scope scope = INIT_SCOPE(0);
782 d = AllocDeclaration(sg, DOP_TYPEDEF, &scope);
783 d->d_TypedefDecl.ed_Type = sg->sg_ClassType;
785 AllocClassType(&sg->sg_ClassList, NULL,
786 sg->sg_Stmt->st_MyGroup, SCOPE_ALL_VISIBLE);
788 HashDecl(d, RUNEID__T);
793 * Fixup the method procedures
795 if (st->st_Op == ST_Class) {
798 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
799 if (d->d_Op == DOP_PROC &&
800 (d->d_ProcDecl.ed_Type->ty_SQFlags & (SF_METHOD | SF_GMETHOD)))
802 ResolveMethodProcedureThisArg(sg, d);
807 st->st_Flags &= ~STF_RESOLVING;
808 st->st_Flags |= STF_RESOLVED;
811 * If this is an ST_Import we must recurse through it. The only
812 * statements under an Import should be Modules. Well, really just one
813 * module. And under that module we only care about ST_Import and
814 * ST_Class statements.
816 * If this is a shared import the statement list will be empty (later it
817 * may be used for import refinement, I dunno). This is what we want
818 * since we should only resolve a shared import once.
820 if (st->st_Op == ST_Import) {
823 RUNE_FOREACH(scan, &st->st_List, st_Node) {
826 dassert_stmt(scan, scan->st_Op == ST_Module);
827 RUNE_FOREACH(scan2, &scan->st_List, st_Node) {
828 if (scan2->st_Op == ST_Import || scan2->st_Op == ST_Class) {
829 ResolveClasses(scan2, flags);
833 if (st->st_ImportStmt.es_DLL) {
836 func = dlsym(st->st_ImportStmt.es_DLL, "resolveClasses");
844 * ResolveStmt() - Resolve all types, declarations, and semantic refs
846 * Resolves all types, declarations, and identifiers. Additionally this
847 * function resolves intermediate types for expressions. Storage sizes are
848 * resolved but offsets are not assigned to declarations.
850 * Returns a complexity count.
854 ResolveStmt(SemGroup *isg, Stmt *st, int flags)
857 * Nothing to do if we have already resolved this statement
859 dassert_stmt(st, (st->st_Flags & STF_RESOLVING) == 0);
860 if (st->st_Flags & STF_RESOLVED)
862 st->st_Flags |= STF_RESOLVING;
865 * Process whether we detached as a thread already or not.
868 st->st_Flags |= st->st_Parent->st_Flags & STF_DIDRESULT;
871 * Deal with unresolved types here
873 * If this is an executable layer, flag the SG as resolved.
874 * Note that ResolveSemGroup is never called on executable layers,
875 * they are handled by ResolveStmt() and ResolveDecl().
877 if (st->st_Flags & STF_SEMANTIC) {
878 SemGroup *sg = st->st_MyGroup;
881 sg->sg_Flags |= SGF_RESOLVED;
882 RUNE_FOREACH(type, &sg->sg_ClassList, ty_Node) {
883 if (type->ty_Op == TY_UNRESOLVED) {
884 resolveUnresClass(type);
890 * Resolve statements. Don't worry about declarations, those are handled
896 * This will just flag the import declaration as resolved so the code
897 * generator dives it for generation.
899 if (st->st_ImportStmt.es_Decl)
900 ResolveDecl(st->st_ImportStmt.es_Decl, 0);
904 * Recursively resolve contents
906 * COMMENTED OUT - Unnecessary, causes excessive realization of
907 * library elements, etc.
910 /* if (isg == NULL || (isg->sg_Flags & SGF_ENTRY)) */ {
913 RUNE_FOREACH(scan, &st->st_List, st_Node) {
915 * XXX pass isg for import, st_MyGroup for module??
917 ResolveStmt(st->st_MyGroup, scan, flags);
919 if (st->st_Op == ST_Import && st->st_ImportStmt.es_DLL) {
921 dlsym(st->st_ImportStmt.es_DLL, "resolveTypes");
930 * COMMENTED OUT - Unnecessary, causes excessive realization of
931 * library elements, etc.
934 ResolveDecl(st->st_ClassStmt.es_Decl, 0);
938 ResolveDecl(st->st_TypedefStmt.es_Decl, 0);
942 * Resolve declarations, skipping any whose context was moved to a
943 * class (e.g. a declaration at the top level of a file like
944 * Fd.setfd(...) also exists in the Fd class).
947 Declaration *d = st->st_DeclStmt.es_Decl;
950 for (i = 0; i < st->st_DeclStmt.es_DeclCount; ++i) {
951 if (st->st_MyGroup == d->d_MyGroup)
953 d = RUNE_NEXT(d, d_Node);
961 RUNE_FOREACH(scan, &st->st_List, st_Node) {
962 ResolveStmt(isg, scan, flags);
969 if (st->st_LoopStmt.es_Init)
970 ResolveStmt(isg, st->st_LoopStmt.es_Init, flags);
971 if (st->st_LoopStmt.es_BCond) {
973 * NOTE: BoolType global implies an rvalue.
975 st->st_LoopStmt.es_BCond =
976 ResolveExp(isg, st->st_MyGroup,
977 st->st_LoopStmt.es_BCond,
978 &BoolType, RESOLVE_AUTOCAST);
980 if (st->st_LoopStmt.es_ACond) {
982 * NOTE: BoolType global implies an rvalue.
984 st->st_LoopStmt.es_ACond =
985 ResolveExp(isg, st->st_MyGroup,
986 st->st_LoopStmt.es_ACond,
987 &BoolType, RESOLVE_AUTOCAST);
989 if (st->st_LoopStmt.es_AExp) {
991 * NOTE: VoidType global implies an rvalue.
993 st->st_LoopStmt.es_AExp = ResolveExp(isg, st->st_MyGroup,
994 st->st_LoopStmt.es_AExp,
995 &VoidType, RESOLVE_AUTOCAST);
999 * Procedure bodies are not resolved here. We avoid resolving
1000 * the body until the last possible moment.
1002 * The procedure body will be resolved in ResolveDecl()'s DOP_PROC
1005 if (st->st_LoopStmt.es_Body) {
1006 ResolveStmt(isg, st->st_LoopStmt.es_Body, flags);
1015 * NOTE: BoolType global implies an rvalue.
1017 st->st_IfStmt.es_Exp = ResolveExp(isg, st->st_MyGroup,
1018 st->st_IfStmt.es_Exp,
1019 &BoolType, RESOLVE_AUTOCAST);
1020 ResolveStmt(isg, st->st_IfStmt.es_TrueStmt, flags);
1021 if (st->st_IfStmt.es_FalseStmt)
1022 ResolveStmt(isg, st->st_IfStmt.es_FalseStmt, flags);
1026 * NOTE: lvalue/rvalue depends on return type.
1028 st->st_RetStmt.es_ProcRetType =
1029 resolveReturnType(st->st_MyGroup, flags);
1030 if (st->st_RetStmt.es_Exp) {
1031 if (st->st_Flags & STF_DIDRESULT)
1032 dfatal_stmt(st, TOK_ERR_RESULT_SEQUENCING, NULL);
1033 st->st_RetStmt.es_Exp =
1034 ResolveExp(isg, st->st_MyGroup,
1035 st->st_RetStmt.es_Exp,
1036 st->st_RetStmt.es_ProcRetType,
1042 * NOTE: lvalue/rvalue depends on return type.
1044 if (st->st_Flags & STF_DIDRESULT)
1045 dfatal_stmt(st, TOK_ERR_RESULT_SEQUENCING, NULL);
1046 if ((st->st_Parent->st_Flags & STF_SEMTOP) == 0)
1047 dfatal_stmt(st, TOK_ERR_RESULT_SEQUENCING, NULL);
1048 st->st_ResStmt.es_ProcRetType =
1049 resolveReturnType(st->st_MyGroup, flags);
1050 if (st->st_ResStmt.es_Exp) {
1051 st->st_ResStmt.es_Exp = ResolveExp(isg, st->st_MyGroup,
1052 st->st_ResStmt.es_Exp,
1053 st->st_ResStmt.es_ProcRetType,
1058 * Flag that we executed result;
1062 for (scan = st; scan; scan = scan->st_Parent) {
1063 scan->st_Flags |= STF_DIDRESULT;
1064 scan->st_MyGroup->sg_Flags |= SGF_DIDRESULT;
1065 if (scan->st_Flags & STF_SEMTOP)
1072 * NOTE: Switch type must be an rvalue.
1074 * NOTE: It is possible to switch on a type. See ST_Case below for
1077 st->st_SwStmt.es_Exp->ex_Flags |= EXF_REQ_TYPE;
1078 st->st_SwStmt.es_Exp = ResolveExp(isg, st->st_MyGroup,
1079 st->st_SwStmt.es_Exp,
1084 * Switch-on-expression() expects an rvalue.
1086 if ((st->st_SwStmt.es_Exp->ex_Flags & EXF_RET_TYPE) == 0) {
1087 st->st_SwStmt.es_Exp->ex_Type =
1088 DEL_LVALUE(st->st_SwStmt.es_Exp->ex_Type);
1094 RUNE_FOREACH(scan, &st->st_List, st_Node) {
1095 ResolveStmt(isg, scan, flags);
1101 * Handle a case/default. Note that when switching on a type, each
1102 * case expression must return a type.
1104 * NOTE: Case type must be an rvalue. We use the switch type to
1105 * cast, so it will be.
1113 * Set type to cast cases to if we are switching on an
1114 * expression, otherwise we are switching on a type and should
1115 * not try to coerce the cases (it doesn't make sense to).
1117 dassert_stmt(st, st->st_Parent->st_Op == ST_Switch);
1118 if (st->st_Parent->st_SwStmt.es_Exp->ex_Flags & EXF_RET_TYPE)
1121 type = st->st_Parent->st_SwStmt.es_Exp->ex_Type;
1124 * case: (if es_Exp is NULL, this is a default: )
1126 if ((exp = st->st_CaseStmt.es_Exp) != NULL) {
1128 exp->ex_Flags |= EXF_REQ_TYPE;
1129 exp = ResolveExp(isg, st->st_MyGroup,
1130 exp, type, RESOLVE_AUTOCAST);
1132 dassert(exp->ex_Flags & EXF_RET_TYPE);
1133 st->st_CaseStmt.es_Exp = exp;
1137 * Elements of the case/default
1139 RUNE_FOREACH(scan, &st->st_List, st_Node) {
1140 ResolveStmt(isg, scan, flags);
1146 * NOTE: VoidType global implies an rvalue.
1148 * NOTE: If ResolveExp() doesn't cast to void for us, we will do it
1154 exp = ResolveExp(isg, st->st_MyGroup,
1155 st->st_ExpStmt.es_Exp,
1156 &VoidType, RESOLVE_AUTOCAST);
1157 if (exp->ex_Type != &VoidType) {
1158 exp = resolveExpCast(isg, st->st_MyGroup,
1159 exp, &VoidType, flags);
1161 st->st_ExpStmt.es_Exp = exp;
1168 RUNE_FOREACH(scan, &st->st_List, st_Node) {
1169 ResolveStmt(isg, scan, flags);
1173 case ST_ThreadSched:
1176 dassert_stmt(st, 0);
1180 * Calculate and propagate complexity upward.
1185 if ((sg = st->st_MyGroup) != NULL) {
1186 ++sg->sg_Complexity;
1187 if ((st->st_Flags & STF_SEMTOP) == 0 &&
1189 RUNE_NEXT(st, st_Node) == NULL)
1191 sg->sg_Parent->sg_Complexity += sg->sg_Complexity;
1195 * Head of procedure needs to know if any ABI calls will be made
1196 * so it can reserve stack space.
1198 if ((st->st_Flags & STF_SEMTOP) == 0 && sg->sg_Parent) {
1199 sg->sg_Parent->sg_Flags |= sg->sg_Flags & SGF_ABICALL;
1204 st->st_Flags |= STF_RESOLVED;
1205 st->st_Flags &= ~STF_RESOLVING;
1209 * Locate the ST_Proc statement and resolve & return its return type
/*
 * resolveReturnType() - walk up the SemGroup chain to the enclosing
 * ST_Proc statement and return its procedure return type.  The
 * procedure declaration must already be resolving/resolved.
 */
1213 resolveReturnType(SemGroup *sg, int flags __unused)
1220 * Locate the ST_Proc statement
1222 while (sg && (sg->sg_Stmt == NULL || sg->sg_Stmt->st_Op != ST_Proc))
1224 dassert(sg != NULL);
1226 d = st->st_ProcStmt.es_Decl; /* decl is already resolved */
1227 dassert_decl(d, d->d_Op == DOP_PROC);
1228 dassert_decl(d, d->d_Flags & (DF_RESOLVING | DF_RESOLVED));
1229 type = d->d_ProcDecl.ed_Type;
1230 dassert_decl(d, type->ty_Op == TY_PROC);
1231 return (type->ty_ProcType.et_RetType);
/*
 * resolveArgsType() - companion to resolveReturnType(): walk up to the
 * enclosing ST_Proc and return the procedure's arguments type.
 */
1235 resolveArgsType(SemGroup *sg, int flags __unused)
1242 * Locate the ST_Proc statement
1244 while (sg && (sg->sg_Stmt == NULL || sg->sg_Stmt->st_Op != ST_Proc))
1246 dassert(sg != NULL);
1248 d = st->st_ProcStmt.es_Decl; /* decl is already resolved */
1249 dassert_decl(d, d->d_Op == DOP_PROC);
1250 dassert_decl(d, d->d_Flags & (DF_RESOLVING | DF_RESOLVED));
1251 type = d->d_ProcDecl.ed_Type;
1252 dassert_decl(d, type->ty_Op == TY_PROC);
1253 return (type->ty_ProcType.et_ArgsType);
1257 * ResolveDecl() - resolve a declaration
1259 * If the declaration represents a procedure argument, special processing of
1260 * LVALUE scope is required to pass the declaration by reference instead of
1261 * by value. Note that the size of the underlying type DOES NOT CHANGE... it
1262 * may be much larger.
1264 * NOTE: We do not resolve d_Offset here.
1268 ResolveDecl(Declaration *d, int retry)
1272 SemGroup *sg = NULL;
1276 * Recursion detection
1278 if (d->d_Flags & DF_RESOLVED)
1280 if (d->d_Flags & DF_RESOLVING) {
1284 d->d_Flags |= DF_RESOLVING;
1287 * Resolve according to the kind of declaration
1291 if (d->d_ClassDecl.ed_Super)
1292 ResolveType(d->d_ClassDecl.ed_Super, NULL, 0);
1293 sg = d->d_ClassDecl.ed_SemGroup;
1294 ResolveSemGroup(sg, 0);
1295 if (sg->sg_Flags & SGF_RESOLVED) {
1296 d->d_Bytes = d->d_ClassDecl.ed_SemGroup->sg_Bytes;
1297 d->d_AlignMask = d->d_ClassDecl.ed_SemGroup->sg_AlignMask;
1303 * Alias access is a barrier and always returns an rvalue.
1305 * DupExp is absolutely required due to the alias's target context
1306 * being different for each consumer.
1308 type = ResolveType(d->d_AliasDecl.ed_Type, NULL, 0);
1309 if (type->ty_Flags & TF_RESOLVED)
1311 if (d->d_AliasDecl.ed_OrigAssExp) {
1312 d->d_AliasDecl.ed_AssExp =
1313 DupExp(d->d_MyGroup, d->d_AliasDecl.ed_OrigAssExp);
1314 d->d_AliasDecl.ed_AssExp =
1315 ResolveExp(d->d_ImportSemGroup, d->d_MyGroup,
1316 d->d_AliasDecl.ed_AssExp,
1322 d->d_Flags |= DF_RESOLVED; /* XXX */
1323 type = ResolveType(d->d_TypedefDecl.ed_Type, NULL, 0);
1324 d->d_Flags &= ~DF_RESOLVED;
1325 if (type->ty_Flags & DF_RESOLVED)
1330 * This only occurs when resolving an import's semantic group. Since
1331 * we are scanning statements in that context we do not have to
1332 * recurse here, ResolveStmt() will do it for us.
1338 * XXX global procedure, later on, make the argument a type instead
1341 * Avoid a circular loop failure when the procedure declaration
1342 * references the class it is defined in by marking the resolve
1343 * complete even if the type isn't. We can do this because the
1344 * procedure takes no field storage.
1346 ResolveType(d->d_ProcDecl.ed_Type, NULL, 0);
1350 * Deal with constructor/destructor chaining. The chaining winds up
1351 * being reversed and will be corrected by the caller.
1353 * NOTE: Constructors and destructors might be referenced without the
1354 * entire SG being resolved, so be sure to set the ABI flags here.
1356 if (d->d_ScopeFlags & SCOPE_GLOBAL) {
1357 if ((d->d_Flags & DF_ONGLIST) == 0 &&
1358 (d->d_ScopeFlags & (SCOPE_CONSTRUCTOR |
1359 SCOPE_DESTRUCTOR))) {
1360 d->d_GNext = d->d_MyGroup->sg_GBase;
1361 d->d_Flags |= DF_ONGLIST;
1362 d->d_MyGroup->sg_GBase = d;
1363 d->d_MyGroup->sg_Flags |= SGF_GABICALL;
1366 if ((d->d_Flags & DF_ONCLIST) == 0 &&
1367 (d->d_ScopeFlags & SCOPE_CONSTRUCTOR)) {
1368 d->d_CNext = d->d_MyGroup->sg_CBase;
1369 d->d_Flags |= DF_ONCLIST;
1370 d->d_MyGroup->sg_CBase = d;
1371 d->d_MyGroup->sg_Flags |= SGF_ABICALL;
1373 if ((d->d_Flags & DF_ONDLIST) == 0 &&
1374 (d->d_ScopeFlags & SCOPE_DESTRUCTOR)) {
1375 d->d_DNext = d->d_MyGroup->sg_DBase;
1376 d->d_Flags |= DF_ONDLIST;
1377 d->d_MyGroup->sg_DBase = d;
1378 d->d_MyGroup->sg_Flags |= SGF_ABICALL;
1383 * If this procedure is bound to a DLL we have to resolve it here.
1385 if (d->d_ScopeFlags & SCOPE_CLANG) {
1386 char buf[RUNE_IDTOSTR_LEN];
1388 d->d_ProcDecl.ed_DLLFunc = FindDLLSymbol(NULL, d->d_ImportSemGroup,
1389 runeid_text(d->d_Id, buf));
1392 case DOP_ARGS_STORAGE:
1393 case DOP_STACK_STORAGE:
1394 case DOP_GLOBAL_STORAGE:
1395 case DOP_GROUP_STORAGE:
1396 type = ResolveType(d->d_StorDecl.ed_Type, NULL, 0);
1399 * Complete if the underlying type is resolved.
1401 if (type->ty_Flags & TF_RESOLVED)
1405 * Promote the lvalue storage qualifier (e.g. from a typedef) into
1406 * the declaration's scope. This is what ultimately controls lvalue
1407 * vs rvalue arguments to procedures and such.
1409 if ((type->ty_SQFlags & SF_LVALUE) &&
1410 (d->d_ScopeFlags & SCOPE_LVALUE) == 0)
1412 d->d_ScopeFlags |= SCOPE_LVALUE;
1416 * Default assignment handling expects an rvalue.
1418 if (d->d_StorDecl.ed_OrigAssExp) {
1419 d->d_StorDecl.ed_AssExp =
1420 DupExp(d->d_MyGroup, d->d_StorDecl.ed_OrigAssExp);
1421 d->d_StorDecl.ed_AssExp =
1422 ResolveExp(d->d_ImportSemGroup, d->d_MyGroup,
1423 d->d_StorDecl.ed_AssExp,
1427 if (d->d_ScopeFlags & SCOPE_LVALUE) {
1429 * Object is passed as a LValueStor structure. Note that d_Bytes
1430 * is going to be different than the underlying type (which
1431 * represents the actual object).
1433 d->d_Bytes = sizeof(LValueStor);
1434 d->d_AlignMask = LVALUESTOR_ALIGN;
1437 * Object is passed by value.
1439 d->d_AlignMask = type->ty_AlignMask;
1440 d->d_Bytes = type->ty_Bytes;
1444 * If the declaration represents or contains an argument-lvalue or a
1445 * pointer we have to add it to the SemGroup's SRBase list to
1446 * properly reference or dereference the elements. XXX only do this
1447 * for non-global storage.
1449 * If the declaration has LVALUE scope we must do the same because
1450 * the ref is tracked.
1452 if ((d->d_Flags & DF_ONSRLIST) == 0) {
1453 if (d->d_Op != DOP_GLOBAL_STORAGE &&
1454 (type->ty_Flags & TF_HASLVREF))
1456 d->d_SRNext = d->d_MyGroup->sg_SRBase;
1457 d->d_MyGroup->sg_SRBase = d;
1458 d->d_Flags |= DF_ONSRLIST;
1459 } else if (d->d_ScopeFlags & SCOPE_LVALUE) {
1460 d->d_SRNext = d->d_MyGroup->sg_SRBase;
1461 d->d_MyGroup->sg_SRBase = d;
1462 d->d_Flags |= DF_ONSRLIST;
1467 * Deal with constructor/destructor chaining. The chaining winds up
1468 * being reversed and will be corrected by the caller.
1470 * NOTE: Constructors and destructors might be referenced without the
1471 * entire SG being resolved, so be sure to set the ABI flags here.
1473 if ((d->d_Flags & DF_ONCLIST) == 0 &&
1474 (type->ty_Flags & TF_HASCONSTRUCT)) {
1475 d->d_CNext = d->d_MyGroup->sg_CBase;
1476 d->d_MyGroup->sg_CBase = d;
1477 d->d_MyGroup->sg_Flags |= SGF_ABICALL;
1478 d->d_Flags |= DF_ONCLIST;
1480 if ((d->d_Flags & DF_ONDLIST) == 0 &&
1481 (type->ty_Flags & TF_HASDESTRUCT)) {
1482 d->d_DNext = d->d_MyGroup->sg_DBase;
1483 d->d_MyGroup->sg_DBase = d;
1484 d->d_MyGroup->sg_Flags |= SGF_ABICALL;
1485 d->d_Flags |= DF_ONDLIST;
1487 if ((d->d_Flags & DF_ONGLIST) == 0 &&
1488 (type->ty_Flags & (TF_HASGCONSTRUCT | TF_HASGDESTRUCT))) {
1489 d->d_GNext = d->d_MyGroup->sg_GBase;
1490 d->d_MyGroup->sg_GBase = d;
1491 d->d_MyGroup->sg_Flags |= SGF_GABICALL;
1492 d->d_Flags |= DF_ONGLIST;
1500 d->d_Flags &= ~DF_RESOLVING;
1501 d->d_Flags |= DF_RESOLVED;
1507 * Post resolution flag resolving (to handle recursion)
1512 * Create copies of procedures as they are needed (thus avoiding an
1513 * XxY matrix effect).
1515 if ((st = d->d_ProcDecl.ed_OrigBody) == NULL) {
1516 Declaration *super = d->d_Super;
1517 while (super && super->d_ProcDecl.ed_OrigBody == NULL) {
1518 super = super->d_Super;
1521 st = super->d_ProcDecl.ed_OrigBody;
1522 d->d_ProcDecl.ed_OrigBody = st;
1525 if (st && (d->d_Flags & DF_DIDPULLDOWN) == 0) {
1527 * Procedure is being used in the primary class it was defined
1528 * in or pulled into from a super-class.
1530 * Link the procedure body to the declaration and resolve the
1531 * procedure body in the context of the correct class.
1533 * NOTE: Alignment and storage is not resolved here.
1535 d->d_Flags |= DF_DIDPULLDOWN;
1536 st = DupStmt(d->d_MyGroup, st->st_Parent, st);
1537 dassert_stmt(st, d->d_ProcDecl.ed_ProcBody == NULL);
1539 d->d_ProcDecl.ed_ProcBody = st;
1540 st->st_ProcStmt.es_Decl = d;
1541 st->st_ProcStmt.es_Scope = d->d_Scope;
1543 ResolveStmt(d->d_ImportSemGroup, st, 0);
1551 * __align(%d) scope qualifier, override the type's alignment
1553 if ((d->d_Scope.s_Flags & SCOPE_ALIGN) && d->d_Scope.s_AlignOverride)
1554 d->d_AlignMask = d->d_Scope.s_AlignOverride - 1;
1557 * Make sure that the semantic group associated with the declaration
1561 if (sg && (sg->sg_Op == SG_MODULE || sg->sg_Op == SG_CLASS)) {
1562 /* SG_COMPOUND too? maybe not */
1563 ResolveSemGroup(d->d_MyGroup, 0);
1568 * ResolveExp() - resolve expression
1570 * Resolve an expression. We are expected to resolve all ex_Type's for the
1571 * expression tree as well as expected to track down operators and base
1574 * itype is a type hint. If non-NULL, the caller would like our expression
1575 * to return the specified type. There are a few special cases:
1577 * EXF_REQ_ARRAY - when OBRACKET requests an array optimization it passes a
1578 * post-array-indexed typehint (as if you had done the optimization). You
1579 * must ignore itype if you are unable to do the optimization.
1581 * NOTE: Even rvalues may have refstor side-effects at run-time.
/*
 * Shorthand aliases for fields of the local 'exp' variable, used heavily
 * throughout ResolveExp() below to keep the case bodies readable.  Each
 * macro expands through the *current* value of 'exp', so the aliases stay
 * correct even after 'exp' is reassigned (e.g. by DupExp()).
 *
 * NOTE(review): presumably #undef'd after ResolveExp()'s body — confirm
 * against the remainder of the file.
 */
1584 #define exFlags exp->ex_Flags
1585 #define exFlags2 exp->ex_Flags2
1586 #define exType exp->ex_Type
1587 #define exToken exp->ex_Token
1588 #define exDecl exp->ex_Decl
1589 #define exLhs exp->ex_Lhs
1590 #define exVisibility exp->ex_Visibility
1591 #define exRhs exp->ex_Rhs
1592 #define exId exp->ex_Id
1593 #define exStr exp->ex_Str
1597 ResolveExp(SemGroup *isg, SemGroup *sg, Exp *exp, Type *itype, int flags)
1602 * Expressions can only be resolved once. If we hit this assertion
1603 * it likely means someone forgot to DupExp() an expression somewhere.
1605 dassert_exp(exp, (exFlags & EXF_RESOLVED) == 0);
1607 if (exp->ex_Flags & EXF_DUPEXP)
1608 exp = DupExp(sg, exp);
1612 * Ensure that the cast target type hint is resolved.
1615 ResolveType(itype, NULL, 0);
1618 * note: certain cases below call other resolver functions and assume
1619 * that ex* variables are unchanged.
1621 dassert((exFlags & EXF_DUPEXP) || (exFlags & EXF_RESOLVED) == 0);
1626 * An assignment. Note that we optimize void returns (such as when
1627 * an assignment is a statement like 'a = 4;' ... the result of the
1628 * assignment is cast to void.
1630 * NOTE: Left-hand-side must be an LVALUE, return type inherits this
1631 * feature unless the parent turns off the bit so the TOK_ASS
1632 * run-time must deal with that.
1634 exLhs = ResolveExp(isg, sg, exLhs, NULL,
1635 flags & ~RESOLVE_AUTOCAST);
1636 //dassert_exp(exLhs, exLhs->ex_Type->ty_SQFlags & SF_LVALUE);
1637 dassert_exp(exLhs, exLhs->ex_Flags2 & EX2F_LVALUE);
1639 exRhs = ResolveExp(isg, sg, exRhs,
1640 DEL_LVALUE(exLhs->ex_Type),
1641 flags | RESOLVE_AUTOCAST);
1642 if (exLhs->ex_Type->ty_SQFlags & SF_CONST) {
1643 dfatal_exp(exp, TOK_ERR_READONLY, NULL);
1646 /* AssExp handles this optimization */
1647 if (itype == &VoidType) {
1649 exFlags |= EXF_RET_VOID;
1651 exType = exLhs->ex_Type;
1656 * NOTE: BoolType global implies an rvalue.
1659 exLhs = ResolveExp(isg, sg, exLhs, &BoolType,
1660 flags | RESOLVE_AUTOCAST);
1664 * If left-side can terminate the operation, mark the expression as
1665 * PROBCONST for the interpreter and code generator (allowing the rhs
1666 * to not be a constant).
1668 if (exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) {
1671 exLhs = resolveConstExpBool(isg, sg, exLhs, flags, &ts);
1672 if (ts.ts_Bool == 0)
1673 exFlags |= EXF_PROBCONST;
1678 * Resolve rhs, and we can also flag PROBCONST if both sides are
1681 exRhs = ResolveExp(isg, sg, exRhs, &BoolType,
1682 flags | RESOLVE_AUTOCAST);
1683 if ((exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) &&
1684 (exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST))) {
1685 exFlags |= EXF_PROBCONST;
1691 * NOTE: BoolType global implies an rvalue.
1694 exLhs = ResolveExp(isg, sg, exLhs, &BoolType,
1695 flags | RESOLVE_AUTOCAST);
1699 * If left-side can terminate the operation, mark the expression as
1700 * PROBCONST for the interpreter and code generator (allowing the rhs
1701 * to not be a constant).
1703 if (exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) {
1706 exLhs = resolveConstExpBool(isg, sg, exLhs, flags, &ts);
1708 exFlags |= EXF_PROBCONST;
1713 * Resolve rhs, and we can also flag PROBCONST if both sides are
1716 exRhs = ResolveExp(isg, sg, exRhs, &BoolType,
1717 flags | RESOLVE_AUTOCAST);
1718 if ((exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) &&
1719 (exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST))) {
1720 exFlags |= EXF_PROBCONST;
1726 * This synthesized token occurs when we are able to collapse a
1727 * structural indirection or dotted element into a declaration. For
1728 * example, 'module.routine'.
1730 /* XXX couldconst? */
1735 * Structural field access. The left hand side may be an object
1736 * (class or compound), a class type, or a compound type.
1738 * A dotted access requires an lvalue on the left hand side if the
1739 * left hand side represents storage.
1741 * The result will be an lvalue if the right hand side represents
1742 * storage. We only loop if the right hand side is an alias
1754 int procedureOnly = 0;
1755 int eno = TOK_ERR_ID_NOT_FOUND;
1758 * NOTE: Hint must 'always happen' since we may be modifying an
1759 * expression that will later be Dup'd.
1761 * NOTE: Lhs is always an lvalue for TOK_DOT, but does not have
1762 * to be for TOK_STRIND.
1764 exLhs->ex_Flags |= EXF_REQ_TYPE;
1765 if (exToken == TOK_DOT)
1766 exLhs->ex_Flags |= exFlags & EXF_ADDRUSED;
1767 exLhs = ResolveExp(isg, sg, exLhs, NULL, flags & ~RESOLVE_AUTOCAST);
1770 * It shouldn't be possible for the RHS to be turned into
1771 * a TOK_SEMGRP_ID prior to resolution.
1773 * (XXX shouldn't be possible.
1775 dassert(exRhs->ex_Token != TOK_SEMGRP_ID);
1776 dassert_exp(exRhs, exRhs->ex_Token == TOK_STRUCT_ID);
1777 exRhs = ResolveExp(isg, sg, exRhs, NULL, flags & ~RESOLVE_AUTOCAST);
1779 type = exLhs->ex_Type;
1782 * Calculate scope and SemGroup to search. Note that it is legal
1783 * to do a structural '.' selection on a pointer, but it works
1784 * differently than indirecting through a pointer via '->'. In
1785 * the case of '.' on a pointer, we first search the system
1788 if (exLhs->ex_Flags & EXF_RET_TYPE) {
1793 * Figure out the base type used to look-up the identifier. An
1794 * identifier that resolves into a procedure winds up only being
1795 * a hint for a reference type.
1797 if (exToken == TOK_STRIND) {
1798 switch (type->ty_Op) {
1800 type = type->ty_RawPtrType.et_Type;
1803 type = type->ty_RefType.et_Type;
1807 dassert_exp(exp, 0);
1813 switch (type->ty_Op) {
1815 sg2 = type->ty_ClassType.et_SemGroup;
1818 sg2 = type->ty_CompType.et_SemGroup;
1821 sg2 = type->ty_ArgsType.et_SemGroup;
1824 sg2 = type->ty_VarType.et_SemGroup;
1827 sg2 = type->ty_ImportType.et_SemGroup;
1831 dassert_exp(exp, PointerType.ty_Op == TY_CLASS);
1832 sg2 = PointerType.ty_ClassType.et_SemGroup;
1836 dassert_exp(exp, ReferenceType.ty_Op == TY_CLASS);
1837 sg2 = ReferenceType.ty_ClassType.et_SemGroup;
1841 * Possibly a pointer, aka ptr.NULL
1845 visibility = exLhs->ex_Visibility;
1848 * Locate the identifier normally, via its type. ty_Visibility
1849 * is the initial visibility (scope) that the semantic search
1850 * should use in locating the identifier.
1853 runeid_t ary[2] = { id, 0 };
1856 if (exLhs->ex_Token == TOK_ID ||
1857 exLhs->ex_Token == TOK_DECL) {
1858 if (exLhs->ex_Decl->d_Search) {
1859 level = exLhs->ex_Decl->d_Search->sg_Level;
1861 level = sg2->sg_Level;
1865 * SUPER (super.blah and super->blah) handling
1867 if (exLhs->ex_Flags & EXF_SUPER) {
1869 fprintf(stderr, "No superclass available\n");
1870 dassert_exp(exp, 0);
1875 level = sg2->sg_Level; /* may be -1 */
1877 visibility &= type->ty_Visibility;
1878 d = FindDeclPath(&exp->ex_LexRef, NULL,
1881 &visibility, level, &eno);
1883 * SUPER (super.blah and super->blah) handling
1885 * If the super is visible and a procedure we just found
1886 * our own refinement, not the superclass method.
1887 * This is because there is no 'superclass method' per se,
1888 * refinements *REPLACE* superclass declarations and inherit
1889 * the superclass's level. However, we still want to be able
1890 * to chain method calls so what we do instead is go through
1891 * and find the procedure that we smacked when we did the
1892 * refinement. This procedure has already been conveniently
1893 * brought into the subclass context as an 'invisible' entity
1894 * at the same d_Level.
1896 * The run-time detects the EXF_SUPER case and knows it can
1897 * use a static call instead of a dynamic call, so we need
1898 * to get this right.
1900 if ((exLhs->ex_Flags & EXF_SUPER) && d &&
1901 d->d_Op == DOP_PROC &&
1902 (d->d_ScopeFlags & SCOPE_ALL_VISIBLE))
1904 runeid_t id2 = d->d_Id;
1905 SemGroup *olevel = d->d_Level;
1907 while ((d = RUNE_NEXT(d, d_Node)) != NULL) {
1908 if (d->d_Id == id2 &&
1909 d->d_Level == olevel &&
1910 d->d_Op == DOP_PROC)
1920 if (d && procedureOnly && d->d_Op != DOP_PROC) {
1922 "PTR.ELEMENT may be used for special "
1923 "pointer method calls, but not to "
1924 "access storage elements. "
1925 "Use PTR->ELEMENT instead\n");
1926 dassert_exp(exp, 0);
1930 * If referencing actual storage the storage must be declared
1933 if (d && globalOnly && (d->d_Op & DOPF_STORAGE) &&
1934 (d->d_ScopeFlags & SCOPE_GLOBAL) == 0)
1936 char buf[RUNE_IDTOSTR_LEN];
1938 "%s is not global. Only globals can be accessed "
1940 runeid_text(d->d_Id, buf));
1941 dassert_exp(exp, 0);
1946 * Identifier found. Note that if we are going through a
1947 * reference type the declaration is not the actual one we
1948 * use at run time. It's just a template.
1952 exVisibility = visibility;
1954 if (exFlags & EXF_REQ_ADDROF)
1955 d->d_Flags |= DF_ADDROF;
1956 if (exFlags & EXF_ADDRUSED)
1957 d->d_Flags |= DF_ADDRUSED;
1964 exType = d->d_ProcDecl.ed_Type;
1965 if (d->d_ProcDecl.ed_Type->ty_SQFlags & SF_METHOD) {
1967 * Method call, do not collapse the expression into a
1968 * direct declaration because the object is needed
1972 (exLhs->ex_Flags & EXF_RET_TYPE) == 0);
1973 } else if (isRefTo) {
1975 * Call via reference. The lhs is required to
1976 * evaluate the actual method call at run-time.
1980 * Global method call or normal call. For the global
1981 * method case the lhs is not needed because the
1982 * parser entered the first argument as a type
1985 * Degenerate into a TOK_DECL. We depend on this
1986 * later. (mark ex_Type as parse-time for DupExp).
1988 exFlags &= ~EXF_BINARY;
1989 exFlags |= EXF_PARSE_TYPE;
1996 exType = ResolveTypeSimple(d->d_AliasDecl.ed_Type);
1997 dassert_decl(d, d->d_AliasDecl.ed_OrigAssExp != NULL);
2000 * NOTE: exLhs must be NULL if exp is unresolved. exp
2001 * tree duplications do not duplicate the alias's
2002 * exLHS even though UNARY is set.
2004 * DupExp is absolutely required due to the alias's
2005 * target context being different for each consumer.
2007 dassert_exp(exp, exRhs->ex_Lhs == NULL);
2008 exRhs->ex_Flags |= EXF_ALIAS | EXF_UNARY;
2009 exRhs->ex_Lhs = DupExp(sg2, d->d_AliasDecl.ed_OrigAssExp);
2010 exRhs->ex_Lhs = ResolveExp(isg, sg2,
2013 flags | RESOLVE_AUTOCAST);
2014 exFlags2 |= exRhs->ex_Flags2 & EX2F_LVALUE;
2016 case DOP_ARGS_STORAGE:
2017 case DOP_STACK_STORAGE:
2018 case DOP_GLOBAL_STORAGE:
2019 case DOP_GROUP_STORAGE:
2021 * Set type. The Rhs is a STRUCT_ID and does not require
2022 * a type to be assigned to it.
2024 * Return type is always an LVALUE, parent may adjust.
2026 //exType = ADD_LVALUE(d->d_StorDecl.ed_Type);
2027 exType = ResolveTypeSimple(d->d_StorDecl.ed_Type);
2028 exFlags2 |= EX2F_LVALUE;
2031 * Pull up global constants
2033 if (exToken == TOK_DOT &&
2034 d->d_Op == DOP_GLOBAL_STORAGE &&
2035 (d->d_ScopeFlags & SCOPE_READONLY) &&
2036 (exLhs->ex_Flags & EXF_RET_TYPE))
2038 exFlags |= EXF_PROBCONST;
2043 * XXX make sure this is only used in the lhs of a
2044 * structural reference. XXX
2046 * XXX what if we went through a TY_RETO type? This type
2049 * collapse the exp node.
2051 exType = d->d_TypedefDecl.ed_Type;
2053 exFlags &= ~EXF_BINARY;
2057 * Do not collapse an import, we require more resolution.
2058 * e.g. import.<blah> will be collapsed, but 'import'
2061 if (exFlags & EXF_REQ_TYPE) {
2064 &d->d_ImportDecl.ed_SemGroup->sg_ClassList,
2065 d->d_ImportDecl.ed_SemGroup,
2067 exFlags |= EXF_RET_TYPE;
2073 * Do not collapse a class, we require more resolution.
2074 * e.g. class.<blah> will be collapsed, but 'class'
2077 if (exFlags & EXF_REQ_TYPE) {
2078 exType = d->d_ClassDecl.ed_SemGroup->sg_ClassType;
2079 exType = TypeToVisibilityType(exType, visibility);
2080 exFlags |= EXF_RET_TYPE;
2083 exType = ResolveTypeSimple(exType);
2084 exFlags2 |= EX2F_LVALUE;
2087 dassert_exp(exp, 0);
2090 if (d->d_Op == DOP_PROC) {
2091 if (d->d_ScopeFlags & SCOPE_PURE)
2093 } else if (exType->ty_SQFlags & SF_CONST) {
2096 } else if ((s = SpecialSemGroupGet(id)) != 0) {
2098 * Identifier not found, check for a special identifier.
2100 exRhs->ex_Token = TOK_SEMGRP_ID;
2101 exRhs->ex_Int32 = s;
2106 dassert(type->ty_Op == TY_PTRTO || type->ty_Op == TY_REFTO);
2107 /* NULL is not an lvalue */
2108 /* exType = DEL_LVALUE(type); */
2110 exFlags |= EXF_NULL;
2111 exFlags2 &= ~EX2F_LVALUE;
2114 dassert(type->ty_Op != TY_PTRTO && type->ty_Op != TY_REFTO);
2115 exType = &Int32Type;
2120 * typeof(self.__data[]) vs (cast)self.__data[]
2122 dassert(type->ty_Op != TY_PTRTO && type->ty_Op != TY_REFTO);
2123 dassert(exFlags & EXF_REQ_ARRAY);
2124 exFlags |= EXF_RET_ARRAY;
2125 if (s == SPECIAL_TYPE) {
2126 exFlags |= EXF_RET_TYPE;
2127 exType = &DynamicLValueType;
2128 } else if (exFlags & EXF_REQ_TYPE) {
2129 exFlags |= EXF_RET_TYPE;
2130 exType = &DynamicLValueType;
2135 * dynamic data must be cast
2137 dassert_exp(exp, 0);
2138 exType = &DynamicLValueType;
2141 case SPECIAL_VAR_COUNT:
2142 dassert(type->ty_Op != TY_PTRTO && type->ty_Op != TY_REFTO);
2143 exType = &Int32Type;
2144 sg->sg_Flags |= SGF_ABICALL;
2146 case SPECIAL_VAR_TYPE:
2147 case SPECIAL_VAR_DATA:
2149 * typeof(self.__vardata[]) vs (cast)self.__vardata[]
2151 dassert(type->ty_Op != TY_PTRTO && type->ty_Op != TY_REFTO);
2152 dassert(exFlags & EXF_REQ_ARRAY);
2153 exFlags |= EXF_RET_ARRAY;
2154 if (s == SPECIAL_TYPE) {
2155 exFlags |= EXF_RET_TYPE;
2156 exType = &DynamicLValueType;
2157 } else if (exFlags & EXF_REQ_TYPE) {
2158 exFlags |= EXF_RET_TYPE;
2159 exType = &DynamicLValueType;
2164 * dynamic data must be cast
2166 dassert_exp(exp, 0);
2167 exType = &DynamicLValueType;
2169 sg->sg_Flags |= SGF_ABICALL;
2171 case SPECIAL_TYPEID:
2172 exType = &Int32Type;
2174 case SPECIAL_TYPESTR:
2178 dassert_exp(exRhs, 0);
2183 * This is nasty, I admit. If we have a pointer or reference
2187 if (type->ty_Op == TY_REFTO) {
2188 type = type->ty_RefType.et_Type;
2192 if (type->ty_Op == TY_PTRTO) {
2193 type = type->ty_RawPtrType.et_Type;
2197 dfatal_exp(exRhs, eno, NULL);
2200 ResolveTypeSimple(exType);
2202 dassert_exp(exp, exType != NULL);
2206 * NOTE: unresolved identifiers should not have alias expression
2207 * sub-tree duplications attached to them. assert it.
2209 dassert_exp(exp, exLhs == NULL);
2213 * NOTE: LVALUE/RVALUE for elements and return type depends on the
2214 * operator. Operator functions normally self-optimize the cases at
2218 exp = resolveExpOper(isg, sg, exp, itype,
2219 flags & ~RESOLVE_AUTOCAST);
2223 * Indirect through an expression.
2225 * Return type is typically an LVALUE (if representing storage). Exp
2226 * parent might turn it off so run-time must test. Lhs may or may
2232 exLhs = ResolveExp(isg, sg, exLhs, NULL, flags & ~RESOLVE_AUTOCAST);
2233 type = exLhs->ex_Type;
2235 switch (type->ty_Op) {
2237 fprintf(stderr, "You cannot use '*' on a reference type\n");
2238 dassert_exp(exLhs, 0);
2240 //exType = ADD_LVALUE(type->ty_RefType.et_Type);
2241 exType = type->ty_RefType.et_Type;
2242 exFlags2 |= EX2F_LVALUE;
2246 //exType = ADD_LVALUE(type->ty_RawPtrType.et_Type);
2247 exType = type->ty_RawPtrType.et_Type;
2248 exFlags2 |= EX2F_LVALUE;
2251 dassert_exp(exLhs, 0);
2258 * Take the address of an (LVALUE) expression. Returns an RVALUE.
2259 * Allow for a short-cut optimization which replaces the TOK_ADDR
2260 * sequence with its argument in the &ary[n] case.
2266 * Hint must 'always happen' since we may be modifying an
2267 * expression that will later be Dup'd.
2269 * It is sufficient to test EXF_ADDRUSED to determine if
2270 * SRSGET/SRSPUT is needed for the procedure.
2272 exLhs->ex_Flags |= EXF_REQ_ADDROF | EXF_ADDRUSED;
2273 exLhs = ResolveExp(isg, sg, exLhs, NULL,
2274 flags & ~RESOLVE_AUTOCAST);
2275 if (exLhs->ex_Flags & EXF_RET_ADDROF) {
2278 type = exLhs->ex_Type;
2279 //dassert_exp(exLhs, type->ty_SQFlags & SF_LVALUE);
2280 dassert_exp(exLhs, exLhs->ex_Flags2 & EX2F_LVALUE);
2281 exType = ResolveTypeSimple(TypeToRawPtrType(type));
2282 /* DEL_LVALUE() not needed here */
2288 * Array index, takes an RVALUE, returns an LVALUE.
2290 * Note: we have to convert the special __data[exp] case.
2292 * Note: ex_Flags hints must 'always happen' since we may be
2293 * modifying an expression that will later be Dup'd.
2295 exRhs = ResolveExp(isg, sg, exRhs, NULL, flags & ~RESOLVE_AUTOCAST);
2296 if (exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) {
2297 exRhs = resolveConstExp(isg, sg, exRhs, flags | RESOLVE_FAILOK);
2299 exLhs->ex_Flags |= EXF_REQ_ARRAY | (exFlags & EXF_REQ_TYPE);
2300 exLhs->ex_Flags |= EXF_ADDRUSED /* | (exFlags & EXF_REQ_ADDROF) */ ;
2301 exLhs->ex_AuxExp = exRhs;
2302 exLhs = ResolveExp(isg, sg, exLhs, itype, flags & ~RESOLVE_AUTOCAST);
2305 * If we are indexing an actual array we have to retain EXF_ADDRUSED
2306 * to prevent it from being cached in a register. Otherwise we are
2307 * indirecting through a pointer and not taking the address of the
2308 * pointer itself. (tests/cat.d uses gets() which is a good test of
2311 if (exLhs->ex_Type && exLhs->ex_Type->ty_Op != TY_ARYOF)
2312 exLhs->ex_Flags &= ~(EXF_ADDRUSED | EXF_REQ_ADDROF);
2314 if (MatchType(&IntegralType, exRhs->ex_Type) >= SG_COMPAT_FAIL) {
2315 dfatal_exp(exRhs, TOK_ERR_EXPECTED_INTEGRAL_TYPE, NULL);
2318 if (exLhs->ex_Flags & EXF_RET_ARRAY) {
2320 * __data and __vardata specials
2322 /* don't modify ex_Token, EXF_DUPEXP might be set */
2323 /* exp->ex_Token = TOK_ERR_EXP_REMOVED; */
2325 } else if (exFlags & EXF_REQ_ADDROF) {
2327 * &ary[i] optimization - allows us to create a bounded pointer
2328 * (returns an RVALUE).
2330 * XXX now we just return a raw pointer
2334 exFlags |= EXF_RET_ADDROF;
2336 dassert((exLhs->ex_Flags & EXF_RET_TYPE) == 0);
2338 exLhs->ex_AuxExp = NULL;
2339 type = exLhs->ex_Type;
2341 switch (type->ty_Op) {
2343 type = type->ty_AryType.et_Type;
2346 type = type->ty_RawPtrType.et_Type;
2349 /* Cannot take address of a reference type */
2350 dassert_exp(exp, 0);
2353 exType = ResolveType(TypeToRawPtrType(type), NULL, 0);
2354 /* returns an RVALUE */
2357 * Unoptimized array lookup, returns an lvalue
2361 dassert((exLhs->ex_Flags & EXF_RET_TYPE) == 0);
2363 exLhs->ex_AuxExp = NULL;
2364 type = exLhs->ex_Type;
2366 switch (type->ty_Op) {
2368 type = type->ty_AryType.et_Type;
2371 type = type->ty_RawPtrType.et_Type;
2374 fprintf(stderr, "Cannot index a reference type\n");
2375 dassert_exp(exp, 0);
2378 //exType = ADD_LVALUE(type);
2379 exType = ResolveTypeSimple(type);
2380 exFlags2 |= EX2F_LVALUE;
2381 /* returns an LVALUE */
2385 dassert_exp(exp, 0); /* XXX */
2390 * XXX we should return a bounded pointer here.
2393 exFlags |= EXF_CONST;
2395 if ((exFlags2 & EX2F_ESCDONE) == 0) {
2398 exFlags2 |= EX2F_ESCDONE;
2399 str = StrTableEscapeQuotedString(exStr, strlen(exStr), 1);
2400 ReplaceStrTable(&exp->ex_Str, str);
2405 * Set EXF_PARSE_TYPE to make sure that ex_Type survives DupExp().
2407 * exp->u.uint32 is always set to the single-quoted result
2411 exFlags |= EXF_CONST | EXF_PARSE_TYPE;
2412 dassert(exType != NULL);
2416 * Integer and related type is already loaded into the exp
2419 exFlags |= EXF_CONST;
2420 dassert(exType != NULL);
2424 * Float and related type is already loaded into the exp
2427 exFlags |= EXF_CONST;
2428 dassert(exType != NULL);
2435 * The self identifier represents the current procedure's arguments.
2436 * A varargs procedure will actually be called with an extended
2437 * version of this type, but for resolution purposes we can use this
2440 * This is an LVALUE to support things like self.new() XXX.
2442 //exType = ADD_LVALUE(resolveArgsType(sg, flags));
2443 exType = ResolveTypeSimple(resolveArgsType(sg, flags));
2444 exFlags2 |= EX2F_LVALUE;
2448 * The '$' identifier represents the current procedure's return
2451 if (sg->sg_Flags & SGF_DIDRESULT)
2452 dfatal_exp(exp, TOK_ERR_RESULT_SEQUENCING, NULL);
2453 //exType = ADD_LVALUE(resolveReturnType(sg, flags));
2454 exType = ResolveTypeSimple(resolveReturnType(sg, flags));
2455 exFlags2 |= EX2F_LVALUE;
2460 * Lookup the identifier. The returned declaration could represent a
2461 * class, typedef, module, or storage, but for this case we only
2462 * allow storage or a constant. Since we are starting from our own
2463 * semantic group, visibility is initially ALL (private, library, and
2466 * The identifier might represent something at a higher scoping
2467 * layer. For example, a nested procedure accessing a variable in
2468 * the parent procedure or a method procedure in a class accessing an
2469 * element of the object.
2471 * It is also possible for the current execution scoping layer (sg)
2472 * to have a secondary contextual layer from which global constants
2473 * can be accessed. This is typically set when resolving procedure
2474 * arguments for procedures called through objects or types. Only
2475 * type globals can be accessed via this shortcut.
2477 * This returns an LVALUE if the id represents storage.
2481 int eno = TOK_ERR_ID_NOT_FOUND;
2486 * Special case 'super'. XXX TY_REFTO
2488 * Make an in-place change to the expression structure. 'super'
2489 * is actually 'this' with the EXF_SUPER flag set.
2491 if (exId == RUNEID_SUPER) {
2493 exFlags |= EXF_SUPER;
2495 ary[0] = exp->ex_Id;
2498 exDecl = FindDeclPath(&exp->ex_LexRef, isg, sg,
2500 FDC_NULL, &exVisibility,
2502 if (exDecl == NULL) {
2503 exDecl = FindDeclPathAltContext(&exp->ex_LexRef, isg, sg,
2505 FDC_NULL, &exVisibility,
2508 if (exDecl == NULL) {
2509 dfatal_exp(exp, eno, NULL);
2513 * The EXF flag is set by TOK_ADDR, possibly propagated down via
2514 * TOK_DOT. Use this to flag that the stack context might be
2515 * used outside of its normal life. LValue scoped declarations
2516 * do not count because they have their own RefStor.
2518 * (This code is primarily responsible for causing SRSGET and
2519 * SRSPUT instructions to be emitted).
2521 if ((exFlags & EXF_ADDRUSED) &&
2522 (exDecl->d_Scope.s_Flags & SCOPE_LVALUE) == 0)
2524 exDecl->d_MyGroup->sg_Flags |= SGF_ADDRUSED;
2528 * We have to resolve the declaration here, we no longer have the
2529 * redundancy to resolve it elsewhere.
2531 if ((exDecl->d_Flags & DF_RESOLVING) == 0)
2532 ResolveDecl(exDecl, 0);
2535 switch (exDecl->d_Op) {
2536 case DOP_ARGS_STORAGE:
2537 if (sg->sg_Flags & SGF_DIDRESULT)
2538 dfatal_exp(exp, TOK_ERR_RESULT_SEQUENCING, NULL);
2540 case DOP_STACK_STORAGE:
2541 case DOP_GLOBAL_STORAGE:
2542 case DOP_GROUP_STORAGE:
2544 * Storage identifiers are lvalues.
2546 * Try to delay this step, giving the language more flexibility
2547 * in avoiding resolver loops from interdependencies that can
2550 * We can't delay this step when resolving an expression that the
2551 * resolver needs an actual constant result for.
2553 //exType = ADD_LVALUE(exDecl->d_StorDecl.ed_Type);
2554 exType = ResolveTypeSimple(exDecl->d_StorDecl.ed_Type);
2555 exFlags2 |= EX2F_LVALUE;
2556 if (exFlags & EXF_ADDRUSED)
2557 exDecl->d_Flags |= DF_ADDRUSED;
2558 if (exFlags & EXF_REQ_ADDROF)
2559 exDecl->d_Flags |= DF_ADDROF;
2560 if (exType->ty_SQFlags & SF_CONST)
2565 * Aliases are rvalues (even if they could be lvalues).
2566 * XXX actually allow them to be lvalues too.
2568 exType = ResolveTypeSimple(exDecl->d_AliasDecl.ed_Type);
2569 exFlags |= EXF_ALIAS | EXF_UNARY;
2572 * NOTE: exLhs must be NULL if exp is unresolved. exp tree
2573 * duplications do not duplicate the alias's exLHS even though
2574 * UNARY is set. However, because we probably have not actually
2575 * duplicated exp yet, we have to clear the field in our pre-dup
2578 * NOTE: DupExp is absolutely required due to the alias's target
2579 * context being different for each consumer.
2581 if (exFlags & EXF_DUPEXP)
2583 dassert_exp(exp, exLhs == NULL);
2584 exLhs = DupExp(sg, exDecl->d_AliasDecl.ed_OrigAssExp);
2585 exLhs = ResolveExp(isg, sg, exLhs, exType,
2586 flags | RESOLVE_AUTOCAST);
2589 * Inherit EXF_NULL (NULL pointer special) through the alias,
2590 * otherwise it will not be assignable to arbitrary pointers.
2592 exFlags |= exLhs->ex_Flags & EXF_NULL;
2593 exFlags2 |= exLhs->ex_Flags2 & EX2F_LVALUE;
2598 * A procedural identifier.
2600 * Note: procedural pointers cannot be changed so they are not
2603 dassert_exp(exp, (exFlags & EXF_REQ_PROC));
2604 exType = exDecl->d_ProcDecl.ed_Type;
2605 if (exDecl->d_ScopeFlags & SCOPE_PURE)
2609 if (exFlags & EXF_REQ_TYPE) {
2610 exType = exDecl->d_TypedefDecl.ed_Type;
2611 exFlags |= EXF_RET_TYPE;
2614 dassert_exp(exp, 0);
2617 if (exFlags & EXF_REQ_TYPE) {
2618 exType = exDecl->d_ClassDecl.ed_SemGroup->sg_ClassType;
2619 exType = TypeToVisibilityType(exType, exVisibility);
2620 exFlags |= EXF_RET_TYPE;
2623 dassert_exp(exp, 0);
2626 if (exFlags & EXF_REQ_TYPE) {
2629 &exDecl->d_ImportDecl.ed_SemGroup->sg_ClassList,
2630 exDecl->d_ImportDecl.ed_SemGroup,
2632 exFlags |= EXF_RET_TYPE;
2635 dassert_exp(exp, 0);
2638 dassert_exp(exp, 0);
2643 * NOTE: BoolType global implies an rvalue.
2646 exLhs = ResolveExp(isg, sg, exLhs, &BoolType,
2647 flags | RESOLVE_AUTOCAST);
2650 if (exFlags & EXF_REQ_TYPE) {
2651 ResolveType(exType, NULL, 0);
2652 exFlags |= EXF_RET_TYPE;
2654 dassert_exp(exp, 0);
2659 * User cast (or maybe the parser inserted it). Try to resolve the
2660 * expression with the requested type hint but tell ResolveExp() not
2661 * to force the cast.
2663 * Then check the result. If ResolveExp() was not able to optimize
2664 * the requested cast then resolve the cast.
2666 * If the types are compatible we still keep the TOK_CAST node in
2667 * place for the moment. XXX we really need to formalize how
2668 * ex_Type is set Similar vs Exact.
2670 * NOTE: Cast results are always an RVALUE. XXX validate here.
2673 if ((exFlags & EXF_PARSE_TYPE) == 0) {
2674 exRhs->ex_Flags |= EXF_REQ_TYPE;
2675 exRhs = ResolveExp(isg, sg, exRhs, NULL,
2676 flags & ~RESOLVE_AUTOCAST);
2677 exType = exRhs->ex_Type;
2679 exLhs = ResolveExp(isg, sg, exLhs, exType,
2680 flags & ~RESOLVE_AUTOCAST);
2681 if (SimilarType(exType, exLhs->ex_Type) == 0) {
2682 exp = resolveExpCast(isg, sg, exLhs, exType, flags);
2685 /* propagate NULL flag to allow cast to any pointer type */
2686 if (exLhs->ex_Flags & EXF_NULL)
2687 printf("LHS NULL\n");
2688 exp->ex_Flags |= exLhs->ex_Flags & EXF_NULL;
2693 * Calls require the RHS to be a compound expression representing the
2694 * procedure arguments. METHOD calls insert the lhs as the first rhs
2695 * argument by creating a placeholder which is then properly cast
2696 * as part of the compound-argument resolver, and dealt with at
2699 * XXX deal with pointer-to-function verses function XXX the lhs must
2700 * at the moment resolve to the procedure itself.
2702 * In regards to procedure pointers, the declaration will require a
2703 * pointer to the procedure's statement body. XXX this pointer can
2704 * be the physical storage associated with the lhs data but thus
2705 * requires the type to be a pointer. We do not support the 'C'
2706 * (*ptr_to_func)(...) form. You have to use ptr_to_func(...).
2710 Type *atype; /* type for alt context */
2711 SemGroup *save_asg; /* save old alt context */
2713 dassert_exp(exRhs, exRhs->ex_Token == TOK_COMPOUND);
2716 * Note: ex_Flags hints must 'always happen' since we may be
2717 * modifying an expression that will later be Dup'd.
2719 exLhs->ex_Flags |= EXF_REQ_PROC;
2720 exLhs->ex_Flags |= EXF_ADDRUSED;
2721 exLhs = ResolveExp(isg, sg, exLhs, NULL,
2722 flags & ~RESOLVE_AUTOCAST);
2723 ltype = exLhs->ex_Type;
2724 dassert_exp(exLhs, ltype != NULL &&
2725 ltype->ty_Op == TY_PROC);
2726 dassert_exp(exLhs, exLhs->ex_Decl != NULL);
2727 dassert_exp(exRhs, exRhs->ex_Token == TOK_COMPOUND);
2730 * If the lhs type indicates a method procedure, then it's lhs
2731 * is the object we wish to pass as the first argument to the
2732 * method. We dup the lhs exp. For a STRIND TY_PTRTO
2733 * method call we indirect the element and convert it to a
2734 * TOK_DOT lvalue argument of the underlying object.
2736 * A method call via a reference object is a very weird case.
2738 * Since the method called through an object winds up being a
2739 * method tailored for that object, and we are calling through a
2740 * reference to an object, the actual method will be looked up at
2741 * run time and will match the object. Thus we can safely
2742 * indirect through the reference object for this one case. Since
2743 * (*ref_obj) is not normally allowed this will be special-cased
2744 * at compile-time or run-time.
2746 * Note that this occurs before we evaluate the compound
2747 * expression on the right hand side. Also note that since the
2748 * resolver can be called multiple times on a shared expression,
2749 * we have to be careful to shift the arguments around only once.
2751 if ((ltype->ty_SQFlags & SF_METHOD) &&
2752 (exRhs->ex_Flags & EXF_CALL_CONV) == 0)
2757 lhs = exLhs->ex_Lhs;
2759 exRhs->ex_Flags |= EXF_CALL_CONV;
2760 nexp = AllocExp(NULL);
2761 nexp->ex_Token = TOK_THISARG;
2762 nexp->ex_Type = lhs->ex_Type;
2763 nexp->ex_Flags |= EXF_PARSE_TYPE;
2764 nexp->ex_Flags2 |= EX2F_LVALUE;
2765 LexDupRef(&lhs->ex_LexRef, &nexp->ex_LexRef);
2767 switch (exLhs->ex_Token) {
2768 case TOK_STRIND: /* indirect */
2770 * Calling through a ref or pointer
2776 * blah e.g.func_id (resolved)
2780 * NOTE: Do not set EXF_RESOLVED, we need to call the
2781 * resolver to properly propagate ADDRUSED.
2786 * Calling via '.', e.g. stdin->efd.importdesc().
2787 * Take the address of stdin->efd, which will give
2788 * us a pointer rather than a reference. It is not
2789 * possible to obtain a reference from an embedded type.
2790 * This will trigger resolution of the pointer *this
2791 * of the method rather than the @this version.
2793 * If this is a pointer or reference, it will match the
2794 * built-in methods for PointerType and ReferenceType.
2796 * Pass directly as an lvalue. If this is a pointer or
2797 * reference only the builtin methods for the Pointer
2798 * or Reference class are possible. These methods
2799 * require a content-locked reference.
2801 if (lhs->ex_Type->ty_Op == TY_CLASS) {
2804 ntmp = AllocExp(NULL);
2805 ntmp->ex_Lhs = nexp;
2806 ntmp->ex_Token = TOK_ADDR;
2807 ntmp->ex_Type = TypeToRawPtrType(lhs->ex_Type);
2808 ntmp->ex_Flags |= EXF_UNARY | EXF_PARSE_TYPE;
2813 dassert_exp(exp, 0);
2819 * Make sure atype survives DupExp().
2821 //lhs->ex_Flags |= EXF_PARSE_TYPE;
2822 atype = lhs->ex_Type;
2827 //lhs = DupExp(sg, lhs);
2828 //lhs->ex_Next = exRhs->ex_Lhs;
2829 //exRhs->ex_Lhs = lhs;
2831 nexp->ex_Next = exRhs->ex_Lhs;
2832 nexp->ex_Flags |= EXF_PARSE_TYPE;
2833 exRhs->ex_Lhs = nexp;
2834 } else if (ltype->ty_SQFlags & SF_METHOD) {
2835 atype = exRhs->ex_Lhs->ex_Type;
2841 * Try to set an alternative search context during resolution of
2842 * the procedure arguments. This context is only searched if an
2843 * identifier cannot be found through normal means so local
2844 * variables and such will override it as the programmer should
2845 * expect. Since the local semantic stack is under the
2846 * programmer's control, unexpected collisions should either not
2847 * occur or be easily fixed.
2850 switch (atype->ty_Op) {
2852 atype = atype->ty_RefType.et_Type;
2855 atype = atype->ty_RawPtrType.et_Type;
2858 if (atype->ty_Op != TY_CLASS)
2862 save_asg = sg->sg_AltContext;
2863 sg->sg_AltContext = atype->ty_ClassType.et_SemGroup;
2869 * Resolve the right hand side, which are the procedure arguments
2870 * as a compound type. This can get tricky. XXX
2872 * NOTE: We inherit the SF_LVALUE flag from the return type.
2873 * Parent might turn it off.
2875 /* d = exLhs->ex_Decl; */
2876 exRhs = ResolveExp(isg, sg, exRhs,
2877 ltype->ty_ProcType.et_ArgsType,
2878 flags | RESOLVE_AUTOCAST);
2879 exType = ltype->ty_ProcType.et_RetType;
2882 * Restore AltContext after resolving rhs.
2884 sg->sg_AltContext = save_asg;
2885 } else if ((exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) &&
2886 (exLhs->ex_Decl->d_ScopeFlags & SCOPE_PURE)) {
2888 * atype NULL (not method call, which requires an object),
2889 * arguments can become constants, pure function, so result
2890 * can become a constant.
2892 exFlags |= EXF_PROBCONST;
2896 * Additional work to inline the procedure
2898 resolveDynamicProcedure(isg, sg, exp, flags);
2899 resolveProcedureInline(isg, sg, exp, flags);
2902 case TOK_INLINE_CALL:
2904 * An inlined call has already resolved via TOK_CALL. It will not be
2905 * a constant, and any argument modifications have already been
2911 Type *atype; /* type for alt context */
2912 SemGroup *save_asg; /* save old alt context */
2914 exLhs->ex_Flags |= EXF_REQ_PROC;
2915 exLhs->ex_Flags |= EXF_ADDRUSED;
2916 exLhs = ResolveExp(isg, sg, exLhs, NULL, flags & ~RESOLVE_AUTOCAST);
2918 ltype = exLhs->ex_Type;
2922 * Try to set an alternative search context during resolution of
2923 * the procedure arguments. This context is only searched if an
2924 * identifier cannot be found through normal means so local
2925 * variables and such will override it as the programmer should
2926 * expect. Since the local semantic stack is under the
2927 * programmer's control, unexpected collisions should either not
2928 * occur or be easily fixed.
2930 if (ltype->ty_SQFlags & SF_METHOD) {
2933 rhs = exRhs->ex_Lhs;
2934 atype = rhs->ex_Type;
2939 switch (atype->ty_Op) {
2941 atype = atype->ty_RefType.et_Type;
2944 atype = atype->ty_RawPtrType.et_Type;
2947 if (atype->ty_Op != TY_CLASS)
2951 save_asg = sg->sg_AltContext;
2952 sg->sg_AltContext = atype->ty_ClassType.et_SemGroup;
2956 exRhs = ResolveExp(isg, sg, exRhs,
2957 ltype->ty_ProcType.et_ArgsType,
2958 flags | RESOLVE_AUTOCAST);
2961 sg->sg_AltContext = save_asg;
2964 exType = ltype->ty_ProcType.et_RetType;
2965 ResolveStmt(d->d_ImportSemGroup, exp->ex_AuxStmt, flags);
2970 * (NOTE EARLY RETURN)
2972 * A compound expression should always be an RVALUE, but might
2973 * contain LVALUEs (XXX).
2976 exp = resolveCompoundExp(isg, sg, exp, itype, flags);
2981 * (NOTE EARLY RETURN)
2984 exp = resolveBracketedExp(isg, sg, exp, itype, flags);
2989 * The caller must be able to handle a type return when typeof() is
2992 dassert_exp(exp, exFlags & EXF_REQ_TYPE);
2997 * If an expression was supplied, convert it to a type.
2999 * NOTE: ex_Flags hints must 'always happen' since we may be
3000 * modifying an expression that will later be Dup'd.
3003 if ((exFlags & EXF_RET_TYPE) == 0) {
3004 dassert(exLhs != NULL);
3005 exLhs->ex_Flags |= EXF_REQ_TYPE;
3006 exLhs = ResolveExp(isg, sg, exLhs, NULL,
3007 flags & ~RESOLVE_AUTOCAST);
3008 exType = exLhs->ex_Type;
3010 /* do not clear EXF_UNARY, messes up tmp exp storage */
3011 /* exFlags &= ~EXF_UNARY; */
3013 exFlags |= EXF_RET_TYPE;
3014 /* XXX delete the lhs */
3016 ResolveType(exType, NULL, 0);
3020 * Create appropriate integer constants for sizeof() and
3025 exp->ex_Token = TOK_INTEGER;
3026 exp->ex_Tmp.ts_USize = exType->ty_Bytes;
3027 exType = &USizeType;
3028 exFlags &= ~EXF_RET_TYPE;
3029 exFlags |= EXF_CONST;
3032 dassert_exp(exp, (exType->ty_Flags & TF_RESOLVING) == 0);
3033 dassert_exp(exp, exType->ty_Op == TY_ARYOF);
3034 if (exType->ty_AryType.et_Type->ty_Bytes) {
3035 exp->ex_Tmp.ts_USize = exType->ty_Bytes /
3036 exType->ty_AryType.et_Type->ty_Bytes;
3038 exp->ex_Tmp.ts_USize = 0;
3040 exp->ex_Token = TOK_INTEGER;
3041 exType = &USizeType;
3042 exFlags &= ~EXF_RET_TYPE;
3043 exFlags |= EXF_CONST;
3047 /* type is returned */
3054 dassert_exp(exp, 0);
3059 * Ensure that the cast target type is resolved.
3062 ResolveType(exType, NULL, 0);
3063 /* XXX exType was ex_Type */
3066 * If the type hint did not succeed we may have to cast the
3067 * expression to the requested type. Note that if the itype was set
3068 * as part of an array optimization request which could not be
3069 * handled, we must ignore itype.
3071 * Note that SimilarType() will allow exp->ex_Type to be a var-args
3072 * TY_ARGS, and since the original Rhs of a call is set to the
3073 * procedure arguments type, VarType.et_Type should match exactly.
3076 (exFlags & (EXF_REQ_ARRAY | EXF_RET_ARRAY)) != EXF_REQ_ARRAY)
3078 if ((itype->ty_Flags & TF_RESOLVED) == 0)
3079 ResolveType(itype, NULL, 0);
3081 // if ((itype->ty_SQFlags & SF_LVALUE) &&
3082 // (exType->ty_SQFlags & SF_LVALUE) == 0)
3083 if ((itype->ty_SQFlags & SF_LVALUE) &&
3084 (exFlags2 & EX2F_LVALUE) == 0)
3087 fprintf(stderr, "Exp must be an lvalue here\n");
3088 dassert_exp(exp, 0);
3091 if (!SimilarType(itype, exType) &&
3092 (flags & RESOLVE_AUTOCAST)) {
3093 if (exp->ex_Flags & EXF_DUPEXP) {
3094 Exp *nexp = AllocExp(NULL);
3096 nexp->ex_Tmp = exp->ex_Tmp;
3097 LexDupRef(&exp->ex_LexRef, &nexp->ex_LexRef);
3099 exFlags &= ~EXF_DUPEXP;
3100 /* exp = DupExp(sg, exp); */
3102 exFlags |= EXF_RESOLVED;
3103 exp = resolveExpCast(isg, sg, exp, itype, flags);
3109 * Generic constant evaluation flag. Note that EXF_PROBCONST could also
3110 * be set above (TOK_CALL).
3113 (exLhs == NULL || (exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST))) &&
3114 (exRhs == NULL || (exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)))) {
3115 exp->ex_Flags |= EXF_PROBCONST;
3117 exp->ex_Flags |= EXF_RESOLVED;
3123 * Resolve an expression for which the resolver needs the result immediately.
/*
 * resolveConstExp() - resolve an expression whose constant value the
 * resolver needs immediately.
 *
 * The expression is resolved if necessary, its temporary storage is
 * [re]resolved from offset 0, and the expression is then executed by the
 * interpreter (ex_Run) inside a scratch RunContext so its constant value
 * is cached.  The expression must evaluate to a constant or we die fatally
 * (unless RESOLVE_FAILOK is set in flags).
 */
3126 resolveConstExp(SemGroup *isg, SemGroup *sg, Exp *exp, int flags)
3128 urunesize_t tmpbytes;
3129 urunesize_t tmpalign;
3130 srunesize_t ooffset;
/* constant evaluation never auto-casts */
3133 flags &= ~RESOLVE_AUTOCAST;
3135 if ((exp->ex_Flags & EXF_RESOLVED) == 0) {
3136 exp = ResolveExp(isg, sg, exp, NULL, flags);
3138 if ((exp->ex_Flags & EXF_RESOLVED) == 0) {
3139 fprintf(stderr, "early resolve for constant expression failed\n");
3140 LexPrintRef(&exp->ex_LexRef, 0);
/*
 * Save the current run-state and temporary offset, then [re]resolve
 * alignment and storage from offset 0 so the expression can be executed
 * stand-alone.  The originals are restored below after execution.
 */
3144 rstate = exp->ex_RState;
3145 ooffset = exp->ex_TmpOffset;
3148 resolveExpAlign(exp, &tmpalign, RESOLVE_FINALIZE);
3149 resolveExpStorage(exp, RESOLVE_FINALIZE, 0, &tmpbytes);
3151 if (tmpbytes < sg->sg_TmpBytes)
3152 tmpbytes = sg->sg_TmpBytes;
/* must be (probably) constant or we cannot evaluate it at resolve-time */
3154 if ((exp->ex_Flags & (EXF_CONST | EXF_PROBCONST)) == 0) {
3155 if (flags & RESOLVE_FAILOK)
3157 dfatal_exp(exp, TOK_ERR_EXPECTED_INTEGRER_CONST, NULL);
3163 * Special interpreter execution to resolve the expression.
3170 bzero(&ct, offsetof(RunContext, ct_TmpCtxObjInfo));
3171 ct.ct_Flags |= CTF_RESOLVING;
3174 * NOTE: minimum alignment for posix_memalign() is sizeof(void *).
3176 align = sg->sg_TmpAlignMask + 1;
3177 if (align < sizeof(void *)) /* posix_memalign requirement */
3178 align = sizeof(void *);
/*
 * Use the small in-context scratch buffer when the temporary space fits,
 * otherwise allocate a dedicated temporary object.
 */
3180 if (tmpbytes <= sizeof(ct.ct_TmpCtxObjData) &&
3181 align <= sizeof(float128_t))
3183 info = &ct.ct_TmpCtxObjInfo;
3184 initObjectInfo(info, &VoidType, RSOP_TMPSPACE);
3185 ct.ct_TmpData = (void *)&ct.ct_TmpCtxObjData;
3187 if (align < sizeof(float128_t))
3188 align = sizeof(float128_t);
3189 info = allocObjectInfo(&VoidType, RSOP_TMPSPACE, tmpbytes, align);
3190 ct.ct_TmpData = info->in_Data.od_Base;
3192 ct.ct_CtxObject = info;
3193 ct.ct_TmpBytes = tmpbytes;
/* execute; the run must leave the expression marked EXF_CONST */
3195 exp->ex_Run(&ct, &data, exp);
3197 if ((exp->ex_Flags & EXF_CONST) == 0) {
3198 dfatal_exp(exp, TOK_ERR_EXPECTED_INTEGRER_CONST, NULL);
3201 invalObjectInfo(info);
3205 * exp is now a constant, restore the original ex_TmpOffset for normal
3206 * execution/operation (the storage may be needed for large constants).
3208 if (rstate & RSF_STORAGE) {
3209 exp->ex_TmpOffset = ooffset;
3210 /* resolveExpStorage(exp, &tmpbytes); */
3212 exp->ex_TmpOffset = -1;
3213 exp->ex_RState &= ~(RSF_STORAGE | RSF_SUB_STORAGE);
3215 resolveExpAlign(exp, &tmpalign, RESOLVE_CLEAN | RESOLVE_FINALIZE);
/*
 * resolveConstExpBool() - resolve a constant boolean expression and copy
 * its boolean result into *ts.
 *
 * Works like resolveConstExp(): the expression is resolved, temporary
 * storage is [re]resolved from offset 0, and the expression is executed by
 * the interpreter in a scratch RunContext.  The expression must evaluate
 * to a constant or we die fatally.
 */
3222 resolveConstExpBool(SemGroup *isg, SemGroup *sg, Exp *exp, int flags,
3225 urunesize_t tmpbytes;
3226 urunesize_t tmpalign;
3227 srunesize_t ooffset;
/* constant evaluation never auto-casts */
3230 flags &= ~RESOLVE_AUTOCAST;
3232 if ((exp->ex_Flags & EXF_RESOLVED) == 0) {
3233 exp = ResolveExp(isg, sg, exp, NULL, RESOLVE_FINALIZE);
3237 * [re]-resolve the storage from 0 so we can execute the expression.
3239 rstate = exp->ex_RState;
3240 ooffset = exp->ex_TmpOffset;
3243 resolveExpAlign(exp, &tmpalign, RESOLVE_FINALIZE);
3244 resolveExpStorage(exp, RESOLVE_FINALIZE, 0, &tmpbytes);
3246 if (tmpbytes < sg->sg_TmpBytes)
3247 tmpbytes = sg->sg_TmpBytes;
/* must be (probably) constant or we cannot evaluate it at resolve-time */
3249 if ((exp->ex_Flags & (EXF_CONST | EXF_PROBCONST)) == 0) {
3250 dfatal_exp(exp, TOK_ERR_EXPECTED_INTEGRER_CONST, NULL);
3256 * Special interpreter execution to resolve the expression.
3263 bzero(&ct, offsetof(RunContext, ct_TmpCtxObjInfo));
3264 ct.ct_Flags |= CTF_RESOLVING;
3267 * NOTE: minimum alignment for posix_memalign() is sizeof(void *).
/*
 * Use the small in-context scratch buffer when the temporary space fits,
 * otherwise allocate a dedicated temporary object.
 */
3269 if (tmpbytes <= sizeof(ct.ct_TmpCtxObjData) &&
3270 tmpalign <= sizeof(float128_t))
3272 info = &ct.ct_TmpCtxObjInfo;
3273 ct.ct_TmpData = (void *)&ct.ct_TmpCtxObjData;
3274 initObjectInfo(info, &VoidType, RSOP_TMPSPACE);
3276 if (tmpalign < sizeof(float128_t))
3277 tmpalign = sizeof(float128_t);
3278 info = allocObjectInfo(&VoidType, RSOP_TMPSPACE,
3279 tmpbytes, tmpalign);
3280 ct.ct_TmpData = (void *)info->in_Data.od_Base;
3282 ct.ct_CtxObject = info;
3283 ct.ct_TmpBytes = tmpbytes;
/* execute; the run must leave the expression marked EXF_CONST */
3285 exp->ex_Run(&ct, &data, exp);
3288 if ((exp->ex_Flags & EXF_CONST) == 0) {
3289 dfatal_exp(exp, TOK_ERR_EXPECTED_INTEGRER_CONST, NULL);
/* hand the boolean result back to the caller */
3291 ts->ts_Bool = rts->ts_Bool;
3292 invalObjectInfo(info);
3296 * exp is now a constant, restore the original ex_TmpOffset for normal
3297 * execution/operation (the storage may be needed for large constants).
3299 if (rstate & RSF_STORAGE) {
3300 exp->ex_TmpOffset = ooffset;
3302 resolveExpStorage(exp, RESOLVE_FINALIZE, exp->ex_TmpOffset, &tmpbytes);
3304 exp->ex_TmpOffset = -1;
3305 exp->ex_RState &= ~(RSF_STORAGE | RSF_SUB_STORAGE);
3307 resolveExpAlign(exp, &tmpalign, RESOLVE_CLEAN | RESOLVE_FINALIZE);
3313 * Extract constant from already-constant-resolved expression.
3314 * resolveConstExp() must have previously been called on exp.
3316 * Expression must have already been constant-optimized, meaning that we
3317 * should be able to execute it without a context to access the cached
3318 * results in exp->u.
3320 * (This can also be called by the generator)
/*
 * resolveGetConstExpInt64() - extract a 64-bit integer value from an
 * already-constant-resolved expression.
 *
 * The expression is executed with a NULL context (legal only because the
 * constant result is cached in the Exp), and the cached bytes are widened
 * according to the type's signedness (TF_ISUNSIGNED) and byte count.
 */
3323 resolveGetConstExpInt64(Exp *exp)
3328 dassert_exp(exp, (exp->ex_Flags & EXF_CONST));
3329 exp->ex_Run(NULL, &data, exp);
/* unsigned types zero-extend; signed types (below) sign-extend */
3331 if (exp->ex_Type->ty_Flags & TF_ISUNSIGNED) {
3332 switch (exp->ex_Type->ty_Bytes) {
3334 value = *(uint8_t *) data.data;
3337 value = *(uint16_t *) data.data;
3340 value = *(uint32_t *) data.data;
3343 value = *(uint64_t *) data.data;
/* unexpected integer width */
3347 dassert_exp(exp, 0);
3351 switch (exp->ex_Type->ty_Bytes) {
3353 value = *(int8_t *) data.data;
3356 value = *(int16_t *) data.data;
3359 value = *(int32_t *) data.data;
3362 value = *(int64_t *) data.data;
/* unexpected integer width */
3366 dassert_exp(exp, 0);
/*
 * resolveGetConstExpFloat128() - extract a float128 value from an
 * already-constant-resolved (or TOK_FLOAT) expression.
 *
 * The expression is executed with a NULL context (legal only because the
 * constant result is cached in the Exp), and the cached bytes are widened
 * from the type's actual float width to float128_t.
 */
3374 resolveGetConstExpFloat128(Exp *exp)
3379 dassert_exp(exp, exp->ex_Token == TOK_FLOAT ||
3380 (exp->ex_Flags & EXF_CONST));
3381 exp->ex_Run(NULL, &data, exp);
3383 switch (exp->ex_Type->ty_Bytes) {
3385 value = (float128_t) *(float32_t *) data.data;
3388 value = (float128_t) *(float64_t *) data.data;
3391 value = *(float128_t *) data.data;
/* unexpected float width */
3395 dassert_exp(exp, 0);
3402 * resolveCompoundExp() - resolve a compound expression (called from
3403 * ResolveExp() and resolveExpOper()).
3405 * Resolve a compound expression. Compound expressions require a compound
3406 * type to normalize against. This will work for direct assignments, return
3407 * values, casts, and procedure arguments only.
3409 * NOTE: We can't use itype if EXF_REQ_ARRAY is specified because its hinting
3410 * for the array optimization case, which we cannot do.
3412 * Compound expressions may be used in conjunction with types representing
3413 * classes, compound types, and procedure arguments. The compound expression
3414 * may contain subclasses of the superclasses expected by itype. This is
3415 * only allowed if the procedure's body has not yet been generated (for
3416 * example, a method call in a subclass).
3418 * Partially resolved operators are typically converted into procedure calls
3419 * and method calls are also partially resolved, so some elements may already
3422 * XXX named initialization, missing elements (structural initialization),
3423 * and so forth needs to be dealt with.
/*
 * resolveCompoundExp() - resolve a compound expression against the
 * compound type it must normalize to (itype hint, unless EXF_REQ_ARRAY).
 *
 * Each element is matched against the declarations in the compound type's
 * SemGroup, resolved, and cast to the declaration's type as needed.
 * Var-args calls extend a duplicated SemGroup with new declarations.
 */
3426 resolveCompoundExp(SemGroup *isg, SemGroup *sg, Exp *exp,
3427 Type *itype, int flags)
3438 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
3441 * Expression dup()ing
3443 if (exp->ex_Flags & EXF_DUPEXP) {
3446 fprintf(stderr, "DUPEXPC %d\n", ++count);
3448 exp = DupExp(sg, exp);
/* adopt the hinted type unless array optimization was requested */
3451 if (itype && (exp->ex_Flags & EXF_REQ_ARRAY) == 0)
3452 exp->ex_Type = itype;
3455 * If we don't have a SemGroup to normalize against, XXX how should we
3456 * normalize the compound expression?
3458 if (exp->ex_Type == NULL) {
3459 dassert_exp(exp, 0);
3463 * Normalize the compound expression based on the argument types expected
3464 * by the procedure. We have to resolve the type before we start the
3465 * scan in order to ensure that d_Offset is properly assigned.
3467 * Use the declarations found in the compound type semantic group to
3468 * coerce the procedure arguments to generate the correct compound type.
3469 * Note that ResolveExp() recursion must still use the SemGroup that was
3472 * XXX deal with defaults and pre-resolved arguments. XXX
3474 type = ResolveType(exp->ex_Type, NULL, 0);
/* locate the SemGroup to normalize against, by type operator */
3476 switch (type->ty_Op) {
3478 sg2 = type->ty_ArgsType.et_SemGroup;
3481 sg2 = type->ty_VarType.et_SemGroup;
3484 sg2 = type->ty_CompType.et_SemGroup;
3487 sg2 = type->ty_ClassType.et_SemGroup;
3490 dassert_exp(exp, 0);
3491 sg2 = NULL; /* NOT REACHED */
3494 pscan = &exp->ex_Lhs;
3497 * Scan the compound expression and match it up against the compound
3500 d = RUNE_FIRST(&sg2->sg_DeclList);
3501 while ((scan = *pscan) != NULL) {
3502 if (scan->ex_ArgId) {
3504 * Named argument, find it
3506 * (Overloading not allowed)
3508 int eno = TOK_ERR_ID_NOT_FOUND;
3511 nd = FindDeclId(sg2, scan->ex_ArgId, &eno);
3513 dfatal_exp(scan, eno, NULL);
3518 * XXX for now, punt on setting EXF_PROBCONST if the named
3519 * argument skips a declaration.
3521 if (nd != d && (d == NULL || nd != RUNE_NEXT(d, d_Node))) {
3527 * Unnamed argument, run through sequentially. Skip any
3528 * non-storage or global storage.
3530 while (d && d->d_Op != DOP_ARGS_STORAGE &&
3531 d->d_Op != DOP_STACK_STORAGE &&
3532 d->d_Op != DOP_GROUP_STORAGE)
3534 d = RUNE_NEXT(d, d_Node);
3538 * Ran out of storage declarations. If this is a var-args
3539 * SemGroup then we actually create a new SemGroup (and
3540 * eventually a new type) to represent it.
3542 * We then extend the varargs SemGroup. This isn't pretty.
3545 if (varargs == 0 && (sg2->sg_Flags & SGF_VARARGS)) {
3546 sg2 = DupSemGroup(sg2->sg_Parent, NULL, sg2, 1);
3551 "Too many arguments in "
3553 dassert_exp(scan, 0);
3559 * Unlink the expression from the compound list temporarily so we can
3560 * safely resolve it. Either cast the expression to the compound
3561 * element, or create a compound element (e.g. varargs call) to match
3564 * Due to the resolver moving things around, the elements of a
3565 * compound expression are sometimes resolved multiple times.
3567 *pscan = scan->ex_Next;
3568 scan->ex_Next = NULL;
3570 stype = scan->ex_Type;
3574 * Compound declaration (e.g. argument decl) for the argument
3575 * we are stuffing the expression into.
3577 Type *dtype = d->d_StorDecl.ed_Type;
3581 * Do not cast ptr to lvalue-void-ptr or ref to lvalue-void-ref,
3582 * the lvalue needs to have the original ptr or ref type.
3584 if ((dtype->ty_SQFlags & SF_LVALUE) &&
3585 stype->ty_Op == TY_PTRTO /*&&
3586 SimilarType(dtype, &VoidPtrType)*/)
3589 sflags = flags & ~RESOLVE_AUTOCAST;
3590 } else if ((dtype->ty_SQFlags & SF_LVALUE) &&
3591 stype->ty_Op == TY_REFTO /*&&
3592 SimilarType(dtype, &VoidRefType)*/)
3595 sflags = flags & ~RESOLVE_AUTOCAST;
3597 sflags = flags | RESOLVE_AUTOCAST;
3601 * LValueStor's need an address, set ADDRUSED.
3603 if (d->d_ScopeFlags & SCOPE_LVALUE)
3604 scan->ex_Flags |= EXF_ADDRUSED;
3606 if ((scan->ex_Flags & EXF_RESOLVED) == 0) {
3607 scan = ResolveExp(isg, sg, scan, dtype, sflags);
3610 * Cast the argument (scan) to the expected (dtype).
3612 * Since we have already resolved the expression we need to
3613 * do the same sanity checking that it would do to cast.
3615 * NOTE! Do NOT insert a cast when the target type is
3616 * lvalue void * or lvalue void @. Otherwise the
3617 * lv_Type loaded into the LValueStor will be incorrect
3618 * for operations, e.g. stdin.new()
3620 //dassert_exp(scan, (dtype->ty_SQFlags & SF_LVALUE) == 0 ||
3621 // (scan->ex_Type->ty_SQFlags & SF_LVALUE));
3622 dassert_exp(scan, (dtype->ty_SQFlags & SF_LVALUE) == 0 ||
3623 (scan->ex_Flags2 & EX2F_LVALUE));
3625 if (!SimilarType(dtype, scan->ex_Type)) {
3629 scan = resolveExpCast(isg, sg, scan, dtype, flags);
3634 * var-arg. Use the same type but do not pass as an lvalue
3635 * or as constant storage (we are copying). Constant-storage
3636 * would also trip-up later checks.
3638 Scope tscope = INIT_SCOPE(0);
3640 if ((scan->ex_Flags & EXF_RESOLVED) == 0) {
3641 scan = ResolveExp(isg, sg, scan, NULL,
3642 flags & ~RESOLVE_AUTOCAST);
/* extend the duplicated varargs SemGroup with a new declaration */
3644 dassert(varargs != 0);
3645 d = AllocDeclaration(sg2, DOP_ARGS_STORAGE, &tscope);
3646 d->d_StorDecl.ed_Type = DEL_LVALUE_CONST(scan->ex_Type);
3648 d->d_Bytes = scan->ex_Type->ty_Bytes;
3649 d->d_AlignMask = scan->ex_Type->ty_AlignMask;
3652 * __align(%d) scope qualifier, override the type's alignment
3654 if ((d->d_Scope.s_Flags & SCOPE_ALIGN) &&
3655 d->d_Scope.s_AlignOverride) {
3656 d->d_AlignMask = d->d_Scope.s_AlignOverride - 1;
3659 d->d_Offset = sg2->sg_Bytes;
3663 * Relink and check if constant
3665 scan->ex_Next = *pscan;
3667 if ((scan->ex_Flags & (EXF_CONST | EXF_PROBCONST)) == 0)
3669 stype = scan->ex_Type;
3672 * If the declaration requires an LVALUE, assert that we have an
3673 * lvalue. Otherwise set the direct-store request (also see
3674 * InterpCompoundExp).
3676 if (d->d_ScopeFlags & SCOPE_LVALUE) {
3677 //if ((stype->ty_SQFlags & SF_LVALUE) == 0)
3678 if ((scan->ex_Flags2 & EX2F_LVALUE) == 0)
3679 fprintf(stderr, "argument must be an lvalue\n");
3680 dassert_exp(scan, (scan->ex_Flags2 & EX2F_LVALUE));
3684 * Catch a programmer's mistake, passing an argument as constant
3685 * storage. An argument is not constant storage.
3687 if (type->ty_Op == TY_ARGS && (d->d_Op & DOPF_STORAGE)) {
3688 if (d->d_StorDecl.ed_Type->ty_SQFlags & SF_CONST) {
3689 dfatal_decl(d, TOK_ERR_READONLY_ARG, NULL);
3696 d = RUNE_NEXT(d, d_Node);
3697 pscan = &scan->ex_Next;
3701 * Make sure the caller knows it's a var-args function even if we didn't
3702 * supply any additional args. Otherwise the backend may not generate
3703 * the correct form for calls to the target.
3706 (sg2->sg_Flags & SGF_VARARGS)) {
3707 sg2 = DupSemGroup(sg2->sg_Parent, NULL, sg2, 1);
3712 * Resolve the varargs sg2 after building it.
3715 ResolveSemGroup(sg2, 0);
3719 * If we made a var-args call, adjust the expression's type
3722 dassert(type->ty_Op == TY_ARGS);
3723 exp->ex_Type = ResolveType(TypeToVarType(type, sg2), NULL, 0);
3726 exp->ex_Flags |= EXF_PROBCONST;
3728 exp->ex_Flags |= EXF_RESOLVED;
3733 * resolveBracketedExp() - resolve a bracketed expression.
3735 * Resolve a bracketed expression. Bracketed expressions require an array
3736 * type to normalize against.
3738 * The bracketed expressions may contain subclasses of the superclasses
3739 * expected by itype.
/*
 * resolveBracketedExp() - resolve a bracketed (array initializer)
 * expression against an array type (itype hint, unless EXF_REQ_ARRAY).
 *
 * Each element is resolved against the array's element type and cast to
 * it as needed.  Elements must be lvalues when the element type demands.
 */
3742 resolveBracketedExp(SemGroup *isg, SemGroup *sg, Exp *exp,
3743 Type *itype, int flags)
3751 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
3754 * Expression dup()ing
3756 if (exp->ex_Flags & EXF_DUPEXP) {
3759 fprintf(stderr, "DUPEXPC %d\n", ++count);
3761 exp = DupExp(sg, exp);
3765 * Expression type is the hinted type.
3767 if (itype && (exp->ex_Flags & EXF_REQ_ARRAY) == 0)
3768 exp->ex_Type = itype;
3771 * We need a type to normalize against.
3773 if (exp->ex_Type == NULL) {
3774 dassert_exp(exp, 0);
3779 * Normalize the bracketed expression based on the array type. We have
3780 * to resolve the type before we start the scan in order to ensure that
3781 * d_Offset is properly assigned.
3783 type = ResolveType(exp->ex_Type, NULL, 0);
/* only arrays may be initialized by bracketed expressions */
3784 if (type->ty_Op != TY_ARYOF) {
3785 dassert_exp(exp, 0);
3788 type = type->ty_AryType.et_Type; /* element type */
3791 * Scan the bracketed expression and match each element against the
3794 pscan = &exp->ex_Lhs;
3795 while ((scan = *pscan) != NULL) {
3800 * Unlink the expression from the compound list temporarily so we can
3801 * safely resolve it. Either cast the expression to the compound
3802 * element, or create a compound element (e.g. varargs call) to match
3805 * Due to the resolver moving things around, the elements of a
3806 * compound expression are sometimes resolved multiple times.
3808 *pscan = scan->ex_Next;
3809 scan->ex_Next = NULL;
/*
 * Do not auto-cast into a pointer/reference lvalue element; the lvalue
 * must keep its original type.
 */
3815 if ((SimilarType(dtype, &PointerType) ||
3816 SimilarType(dtype, &ReferenceType)) &&
3817 (dtype->ty_SQFlags & SF_LVALUE) == SF_LVALUE)
3820 sflags = flags & ~RESOLVE_AUTOCAST;
3822 sflags = flags | RESOLVE_AUTOCAST;
3826 * LValueStor needs a RS, set ADDRUSED to make sure it's available to
3829 if (dtype->ty_SQFlags & SF_LVALUE)
3830 scan->ex_Flags |= EXF_ADDRUSED;
3832 if ((scan->ex_Flags & EXF_RESOLVED) == 0) {
3833 scan = ResolveExp(isg, sg, scan, dtype, sflags);
3836 * Since we have already resolved the expression we need to do
3837 * the same sanity checking that it would do to cast.
3841 (dtype->ty_SQFlags & SF_LVALUE) == 0 ||
3842 (scan->ex_Type->ty_SQFlags & SF_LVALUE));
3845 (dtype->ty_SQFlags & SF_LVALUE) == 0 ||
3846 (scan->ex_Flags2 & EX2F_LVALUE));
3847 if (!SimilarType(dtype, scan->ex_Type)) {
3848 scan = resolveExpCast(isg, sg, scan, dtype, flags);
3853 * Relink and check if constant
3855 scan->ex_Next = *pscan;
3857 if ((scan->ex_Flags & (EXF_CONST | EXF_PROBCONST)) == 0)
3859 //stype = scan->ex_Type;
3862 * If the declaration requires an LVALUE, assert that we have an
3863 * lvalue. Otherwise set the direct-store request (also see
3864 * InterpCompoundExp).
3866 if (dtype->ty_SQFlags & SF_LVALUE) {
3867 //if ((stype->ty_SQFlags & SF_LVALUE) == 0)
3868 if ((scan->ex_Flags2 & EX2F_LVALUE) == 0)
3869 fprintf(stderr, "argument must be an lvalue\n");
3870 dassert_exp (scan, (scan->ex_Flags2 & EX2F_LVALUE));
3872 pscan = &scan->ex_Next;
3876 exp->ex_Flags |= EXF_PROBCONST;
3877 exp->ex_Flags |= EXF_RESOLVED;
3883 * resolveExpCast() - Cast the expression to the specified type and return
3884 * the cast expression.
3886 * Note that expression nodes depend on their ex_Type being correct, and also
3887 * expressions may be shared, so be careful not to modify the ex_Type (or
3888 * anything else) in the existing expression.
3890 * This code is somewhat different then resolveExpOper() and friends. The Exp
3891 * argument has already been resolved so do not resolve it again.
3893 * As with operators we have to locate the cast declaration matching the cast
3897 resolveExpCast(SemGroup *isg, SemGroup *sg, Exp *exp, Type *ltype, int flags)
3904 flags &= ~RESOLVE_AUTOCAST;
3907 rtype = exp->ex_Type;
3908 dassert(rtype && ltype);
3910 * XXX attempt to cast from subclass to superclass?
3914 * XXX look in our local semantic hierarchy for a compatible cast ?
3916 dassert(ltype->ty_Op != TY_UNRESOLVED);
3917 dassert(rtype->ty_Op != TY_UNRESOLVED);
3920 * Look in the right hand (source) type for the cast
3922 d = findCast(rtype, ltype, rtype, flags);
3925 * If that fails then look in the left hand (destination) type for the
3929 d = findCast(ltype, ltype, rtype, flags);
3933 * Look for pointer or reference type casts
3935 if (d == NULL && rtype->ty_Op == TY_PTRTO) {
3936 d = findCast(&PointerType, ltype, rtype, flags);
3938 if (d == NULL && rtype->ty_Op == TY_REFTO) {
3939 d = findCast(&ReferenceType, ltype, rtype, flags);
3944 * We could not find a specific cast operator. There are some
3945 * inherent casts that we can do. We run through these in attempt to
3946 * come up with matching types.
3948 if (ltype->ty_Op != rtype->ty_Op &&
3949 (ltype->ty_Op == TY_PTRTO || ltype->ty_Op == TY_ARYOF) &&
3950 (rtype->ty_Op == TY_PTRTO || rtype->ty_Op == TY_ARYOF))
3953 * Pointers or arrays can be cast to pointers of the same
3956 * Cast the right hand type to an equivalent * pointer/array
3957 * of the right hand type and re-resolve the cast.
3959 exp = ExpToCastExp(exp,
3960 ResolveType(ChangeType(rtype, ltype->ty_Op), NULL, 0));
3961 return (resolveExpCast(isg, sg, exp, ltype, flags));
3962 } else if (MatchType(ltype, rtype) <= SG_COMPAT_PART) {
3964 * If the types are compatible (casting rtype->ltype), we can
3967 exp = ExpToCastExp(exp, ltype);
3968 } else if (MatchType(&NumericType, ltype) <= SG_COMPAT_SUBCLASS &&
3969 MatchType(&NumericType, rtype) <= SG_COMPAT_SUBCLASS) {
3971 * Casting from one numeric type to another must be supported by
3972 * the interpreter/compiler.
3974 exp = ExpToCastExp(exp, ltype);
3975 } else if (SimilarType(&VoidType, ltype)) {
3977 * Casting anything to void is allowed (throwing the object
3978 * away). E.g. statement-expressions.
3980 exp = ExpToCastExp(exp, ltype);
3981 } else if (SimilarType(&VoidPtrType, ltype)) {
3983 * Casting a pointer to a (void *) is trivial, but is only
3984 * allowed if the underlying structure does not contain any
3987 * NOTE: Generally only used when a pointer is being cast to an
3988 * integer. Rune does not allow casting back to other pointer
3991 * XXX validate integral # of objects fit in pointer range.
3993 if (rtype->ty_RawPtrType.et_Type->ty_Flags & TF_HASLVREF)
3994 dfatal_exp(exp, TOK_ERR_LIMITED_VOIDP_CAST, NULL);
3995 exp = ExpToCastExp(exp, ltype);
3996 } else if (SimilarType(&VoidRefType, ltype)) {
3998 * Casting a pointer to a (void @) is trivial.
4000 * NOTE: Generally only used when a pointer is being cast to an
4001 * integer. Rune does not allow casting back to other pointer
4004 * XXX validate integral # of objects fit in pointer range.
4006 if (rtype->ty_RawPtrType.et_Type->ty_Flags & TF_HASLVREF)
4007 dfatal_exp(exp, TOK_ERR_LIMITED_VOIDP_CAST, NULL);
4008 exp = ExpToCastExp(exp, ltype);
4009 } else if (SimilarType(rtype, &VoidPtrType)) {
4011 * Casting from a void pointer may not be trivial but we leave it
4012 * up to the interpreter/compiler.
4014 * Only allow if the target does not contain any pointers or if
4015 * the right-hand-side is NULL.
4017 * XXX validate integral # of objects fit in pointer range.
4019 switch (ltype->ty_Op) {
4021 if ((exp->ex_Flags & EXF_NULL) == 0 &&
4022 (ltype->ty_RefType.et_Type->ty_Flags & TF_HASLVREF))
4024 dfatal_exp(exp, TOK_ERR_LIMITED_VOIDP_CAST, NULL);
4030 exp = ExpToCastExp(exp, ltype);
4031 } else if (SimilarType(rtype, &CVoidPtrType)) {
4032 switch (ltype->ty_Op) {
4034 if ((exp->ex_Flags & EXF_NULL) == 0 &&
4035 (ltype->ty_RawPtrType.et_Type->ty_Flags & TF_HASLVREF)) {
4036 dfatal_exp(exp, TOK_ERR_LIMITED_VOIDP_CAST, NULL);
4042 } else if (SimilarType(ltype, &BoolType) &&
4043 (rtype->ty_Op == TY_PTRTO ||
4044 rtype->ty_Op == TY_REFTO))
4047 * Any pointer can be cast to a boolean, which tests against
4050 exp = ExpToCastExp(exp, ltype);
4051 } else if (ltype->ty_Op == rtype->ty_Op &&
4052 (ltype->ty_Op == TY_PTRTO || ltype->ty_Op == TY_ARYOF))
4055 * We allow casts of pointers to similar numeric types if they
4056 * are the same size, though this is really rather a hack. This
4057 * is mainly to handle the signed<->unsigned cast case. XXX
4061 switch (ltype->ty_Op) {
4063 if ((ltype->ty_RawPtrType.et_Type->ty_SQFlags &
4065 (rtype->ty_RawPtrType.et_Type->ty_SQFlags &
4068 dfatal_exp(exp, TOK_ERR_READONLY, NULL);
4070 if (MatchType(&NumericType, ltype->ty_RawPtrType.et_Type) <=
4071 SG_COMPAT_SUBCLASS &&
4072 MatchType(&NumericType, rtype->ty_RawPtrType.et_Type) <=
4073 SG_COMPAT_SUBCLASS &&
4074 ltype->ty_Bytes == rtype->ty_Bytes)
4076 exp = ExpToCastExp(exp, ltype);
4081 if ((ltype->ty_AryType.et_Type->ty_SQFlags & SF_CONST) == 0 &&
4082 (rtype->ty_AryType.et_Type->ty_SQFlags & SF_CONST) != 0) {
4083 dfatal_exp(exp, TOK_ERR_READONLY, NULL);
4085 if (MatchType(&NumericType, ltype->ty_AryType.et_Type) <=
4086 SG_COMPAT_SUBCLASS &&
4087 MatchType(&NumericType, rtype->ty_AryType.et_Type) <=
4088 SG_COMPAT_SUBCLASS &&
4089 ltype->ty_Bytes == rtype->ty_Bytes)
4091 exp = ExpToCastExp(exp, ltype);
4098 "Unable to resolve cast from pointers "
4099 "to dissimilar numeric types "
4101 TypeToStr(rtype, NULL),
4102 TypeToStr(ltype, NULL));
4103 dassert_exp(exp, 0);
4105 } else if (didagain == 0 &&
4106 (oflags & RESOLVE_AUTOCAST) &&
4107 (exp->ex_Flags2 & EX2F_WASCOMP) &&
4108 ltype->ty_Op == TY_COMPOUND &&
4109 rtype->ty_Op != TY_COMPOUND) {
4111 * The expression parser might have optimized-out the
4112 * TOK_COMPOUND wrapper around single-element parenthesized
4113 * expressions. Add it back in if the cast target expects a
4114 * compound expression.
4116 * XXX Currently hack a SetDupExp() to avoid re-resolving the
4117 * already-resolved component.
4119 exp = ExpToCompoundExp(exp, TOK_COMPOUND);
4120 exp = resolveCompoundExp(isg, sg, exp, ltype, flags);
4123 } else if (didagain == 0 &&
4124 (oflags & RESOLVE_AUTOCAST) &&
4125 (exp->ex_Flags2 & EX2F_WASCOMP) &&
4126 ltype->ty_Op == TY_CLASS &&
4127 rtype->ty_Op == TY_CLASS &&
4128 ltype != &VoidType &&
4129 (ltype->ty_Flags & (TF_ISBOOL | TF_ISINTEGER |
4130 TF_ISFLOATING)) == 0 &&
4131 (rtype->ty_Flags & (TF_ISBOOL | TF_ISINTEGER |
4134 * The expression parser might have optimized-out the
4135 * TOK_COMPOUND wrapper around single-element parenthesized
4136 * expressions used in a class iterator (in an assignment). Add
4137 * it back in if the ltype is a non-core class and rtype is a
4140 * XXX Currently hack a SetDupExp() to avoid re-resolving the
4141 * already-resolved component.
4143 exp = ExpToCompoundExp(exp, TOK_COMPOUND);
4144 exp = resolveCompoundExp(isg, sg, exp, ltype, flags);
4149 "Unable to resolve cast from %s to %s\n",
4150 TypeToStr(rtype, NULL),
4151 TypeToStr(ltype, NULL));
4152 dassert_exp(exp, 0);
4154 } else if (d->d_ScopeFlags & SCOPE_INTERNAL) {
4156 * We found a cast operator and it is an internal operator
4158 exp = ExpToCastExp(exp, ltype);
4162 * We found a cast operator and it is a Rune cast procedure. We must
4163 * convert the cast to a procedure call. If we want
4164 * resolveCompoundExp() to be able to generate a compatible procedure
4165 * (in a subclass) we have to tell it about the procedure.
4169 sexp = ExpToCompoundExp(exp, TOK_COMPOUND);
4170 if (d->d_ProcDecl.ed_ProcBody == NULL) /* XXX */
4172 sexp = resolveCompoundExp(isg, sg, sexp,
4173 d->d_ProcDecl.ed_Type->ty_ProcType.et_ArgsType,
4175 exp = AllocExp(NULL);
4176 exp->ex_Lhs = AllocExp(NULL);
4177 exp->ex_Lhs->ex_Token = TOK_DECL;
4178 exp->ex_Lhs->ex_Id = d->d_Id;
4179 exp->ex_Lhs->ex_Decl = d;
4180 exp->ex_Lhs->ex_Type = d->d_ProcDecl.ed_Type;
4181 exp->ex_Lhs->ex_Flags |= EXF_RESOLVED;
4183 exp->ex_Flags |= EXF_BINARY;
4184 exp->ex_Token = TOK_CALL;
4185 /* XXX use ltype or procedure's rettype? */
4186 exp->ex_Type = ltype;
4187 LexDupRef(&sexp->ex_LexRef, &exp->ex_LexRef);
4188 LexDupRef(&sexp->ex_LexRef, &exp->ex_Lhs->ex_LexRef);
4193 * Additional work to inline the procedure
4195 resolveDynamicProcedure(isg, sg, exp, flags);
4196 resolveProcedureInline(isg, sg, exp, flags);
4198 exp->ex_Flags |= EXF_RESOLVED;
/*
 * findCast() - locate a user-declared cast operator for ltype <- rtype.
 *
 * Searches the SemGroup of the base type of btype for a DOP_PROC
 * declaration flagged SCOPE_CAST whose argument/return types match
 * (ltype, rtype) via MatchCastTypes().  If the base type is a compound
 * type, each element's type is searched recursively.  Returns the
 * matching Declaration, or (presumably) NULL when nothing matches --
 * the tail of the function is outside this view.
 */
4204 findCast(Type *btype, Type *ltype, Type *rtype, int flags)
4209 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
/* Cast sources/targets must already be past TY_UNRESOLVED. */
4211 dassert(rtype->ty_Op != TY_UNRESOLVED);
4212 dassert(ltype->ty_Op != TY_UNRESOLVED);
4215 * Locate the base type. If the base type does not have a SemGroup there
4216 * are no casts. (XXX put system operators here)
4218 sg = BaseType(&btype);
4219 dassert(btype->ty_Op != TY_UNRESOLVED);
4225 * Look for the cast in the SemGroup
4227 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
4228 if (d->d_Op == DOP_PROC && (d->d_ScopeFlags & SCOPE_CAST)) {
/* Resolve the candidate's proc type before matching against it. */
4229 ResolveType(d->d_ProcDecl.ed_Type, NULL, 0);
4230 if (MatchCastTypes(d, ltype, rtype))
4236 * Failed. If the base type is a compound type, look for the cast in the
4237 * SemGroup for each element making up the compound type. e.g. so
4238 * (mycustomtype, double) would find the cast in mycustomtype.
4240 if (btype->ty_Op == TY_COMPOUND) {
4241 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
4243 if (d->d_Op & DOPF_STORAGE) {
4244 ResolveType(d->d_StorDecl.ed_Type, NULL, 0);
4245 d2 = findCast(d->d_StorDecl.ed_Type,
4246 ltype, rtype, flags);
4247 } else if (d->d_Op == DOP_TYPEDEF) {
/*
 * NOTE(review): this branch resolves d_StorDecl.ed_Type but then
 * recurses on d_TypedefDecl.ed_Type.  Looks like a copy/paste of the
 * storage branch -- presumably the ResolveType() call should also use
 * d_TypedefDecl.ed_Type.  Harmless only if the two union members
 * alias; verify against the Declaration union layout.
 */
4248 ResolveType(d->d_StorDecl.ed_Type, NULL, 0);
4249 d2 = findCast(d->d_TypedefDecl.ed_Type,
4250 ltype, rtype, flags);
4263 * resolveExpOper() - resolve an operator
4265 * This is complex enough that it is broken out into its own procedure.
4266 * Normally we just look the operator up but we have to special case pointer
4267 * arithmetic because we will not know until now that we have to do it.
4269 * itype is a return-type hint only. resolveExpOper() can ignore it if it
4270 * wishes. We currently use it to detect cast-to-void, such as when an
4271 * expression like "++i" is used in a for() loop or as a standalone
4272 * statement. This allows us to optimize the case.
/*
 * NOTE(review): exLhs/exRhs/exFlags/exType read like field-alias macros
 * for exp->ex_Lhs etc. -- their definitions are elsewhere in the file;
 * confirm before editing.  'count' in the LVALUE loop below is likewise
 * declared/advanced outside this view.
 */
4275 resolveExpOper(SemGroup *isg, SemGroup *sg, Exp *exp, Type *itype, int flags)
4278 int isPointerOp = 0;
4279 int isReferenceOp = 0;
4281 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
4283 dassert_exp(exp, exp->ex_Id != 0);
/* Resolve operand sub-expressions first; their types drive the lookup. */
4284 if (exFlags & EXF_BINARY) {
4285 exLhs = ResolveExp(isg, sg, exLhs, NULL, flags);
4286 exRhs = ResolveExp(isg, sg, exRhs, NULL, flags);
4287 } else if (exFlags & EXF_UNARY) {
4288 exLhs = ResolveExp(isg, sg, exLhs, NULL, flags);
4290 dassert_exp(exp, 0);
4294 * If the lhs is a pointer look the operator up in the Pointer class
4295 * first. Operators in the Pointer class are special-cased. A second
4296 * pointer argument or a pointer return value must match the lhs pointer.
4298 * If this fails, or if the ltype is not a pointer, then look the
4299 * operator up normally.
4301 if (exLhs->ex_Type->ty_Op == TY_PTRTO) {
4305 if (exFlags & EXF_BINARY) {
4306 rtype = exRhs->ex_Type;
4307 ltype = exLhs->ex_Type;
4309 dassert(exFlags & EXF_UNARY);
4311 ltype = exLhs->ex_Type;
4313 d = findOper(&PointerType, exp->ex_Id, ltype, rtype, flags);
/* Pointer-class lookup failed: fall back to the normal operator search. */
4317 d = findExpOper(exp, flags);
4318 } else if (exLhs->ex_Type->ty_Op == TY_REFTO) {
/* Same two-stage lookup for references, via the Reference class. */
4322 if (exFlags & EXF_BINARY) {
4323 rtype = exRhs->ex_Type;
4324 ltype = exLhs->ex_Type;
4326 dassert(exFlags & EXF_UNARY);
4328 ltype = exLhs->ex_Type;
4330 d = findOper(&ReferenceType, exp->ex_Id, ltype, rtype, flags);
4334 d = findExpOper(exp, flags);
4336 d = findExpOper(exp, flags);
4340 * Fall through to finish up resolving the operator. We just set ex_Decl
4341 * for internal operators, and construct a call for non-internal
4342 * procedural operators.
4350 dassert_exp(exp, d != NULL);
4351 dassert_exp(exp, d->d_Op == DOP_PROC);
4352 dassert_exp(exp, d->d_ProcDecl.ed_Type->ty_Op == TY_PROC);
4353 type = d->d_ProcDecl.ed_Type;
4354 exType = type->ty_ProcType.et_RetType;
4357 * Special case for internal Pointer ops. The return type is the
4358 * left-hand type (we may still optimize it to void later).
4360 if (isReferenceOp &&
4361 (d->d_ScopeFlags & SCOPE_INTERNAL) &&
4362 SimilarType(&VoidRefType, exType))
/* Substitute the lhs type, preserving the declared lvalue-ness. */
4364 if (exType->ty_SQFlags & SF_LVALUE)
4365 exType = ADD_LVALUE(exLhs->ex_Type);
4367 exType = DEL_LVALUE(exLhs->ex_Type);
4371 (d->d_ScopeFlags & SCOPE_INTERNAL) &&
4372 SimilarType(&VoidPtrType, exType))
4374 if (exType->ty_SQFlags & SF_LVALUE)
4375 exType = ADD_LVALUE(exLhs->ex_Type);
4377 exType = DEL_LVALUE(exLhs->ex_Type);
4380 type = d->d_ProcDecl.ed_Type->ty_ProcType.et_ArgsType;
4381 dassert(type->ty_Op == TY_ARGS);
4382 sg2 = type->ty_ArgsType.et_SemGroup;
4385 * Assert that LVALUE requirements are met. XXX MatchType() code
4386 * should disallow the non-lvalue-cast-to-lvalue case so we don't
4387 * have to do a check here.
4389 RUNE_FOREACH(d2, &sg2->sg_DeclList, d_Node) {
4390 if ((d2->d_Op & DOPF_STORAGE) &&
4391 d2->d_Op != DOP_GLOBAL_STORAGE) {
4393 if ((d2->d_ScopeFlags & SCOPE_LVALUE) &&
4394 //(exLhs->ex_Type->ty_SQFlags & SF_LVALUE) == 0)
4395 (exLhs->ex_Flags2 & EX2F_LVALUE) == 0)
4397 fprintf(stderr, "lhs of exp must be lvalue\n");
4398 dassert_exp(exp, 0);
4400 } else if (count == 1) {
4401 if ((d2->d_ScopeFlags & SCOPE_LVALUE) &&
4402 //(exRhs->ex_Type->ty_SQFlags & SF_LVALUE) == 0)
4403 (exRhs->ex_Flags2 & EX2F_LVALUE) == 0)
4405 fprintf(stderr, "rhs of exp must be lvalue\n");
4406 dassert_exp(exp, 0);
4413 if (d->d_ScopeFlags & SCOPE_INTERNAL) {
4415 * Internal operator. Optimize any cast to void by having the
4416 * internal function deal with it. (since we aren't setting
4417 * exType the optimization currently doesn't do anything, see
4421 if (itype == &VoidType) {
4422 /* exType = itype; */
4423 exFlags |= EXF_RET_VOID;
4427 * Normal procedural operator. Convert the left and right hand
4428 * sides to a compound expression and convert exp to a TOK_CALL.
4429 * NOTE! ex_Rhs may be NULL (unary op).
4431 * The compound expression may need to rewrite a subclass
4432 * procedure, which it can do if the procedure's body has not yet
4433 * been created (or duplicated from the superclass). ex_Decl
4434 * must be set in this case.
4436 * Note that the expression structure may be shared. The
4437 * conversion is permanent so that is ok.
4439 * XXX keep the type intact?
4441 exLhs->ex_Next = exRhs;
4443 exRhs = ExpToCompoundExp(exRhs, TOK_COMPOUND);
4444 if (d->d_ProcDecl.ed_ProcBody == NULL)
4446 exRhs = resolveCompoundExp(isg, sg, exRhs, type, flags);
/* Build a fresh lhs identifier node naming the operator procedure. */
4447 exLhs = AllocExp(NULL);
4448 LexDupRef(&exp->ex_LexRef, &exLhs->ex_LexRef);
4449 exLhs->ex_Token = TOK_ID;
4450 exLhs->ex_Id = d->d_Id;
4452 exLhs->ex_Type = d->d_ProcDecl.ed_Type;
4453 exLhs->ex_Flags |= EXF_RESOLVED;
4454 exp->ex_Token = TOK_CALL;
4455 exFlags = EXF_BINARY;
4460 * Additional work to inline the procedure
4462 resolveDynamicProcedure(isg, sg, exp, flags);
4463 resolveProcedureInline(isg, sg, exp, flags);
4467 char buf[RUNE_IDTOSTR_LEN];
4468 fprintf(stderr, "Unable to resolve operator: %s\n",
4469 runeid_text(exp->ex_Id, buf));
4470 dassert_exp(exp, 0);
4474 * Flag a pure operator whose arguments are constants as probably being
4477 if (d->d_ScopeFlags & SCOPE_PURE) {
4478 if ((exLhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)) &&
4480 (exRhs->ex_Flags & (EXF_CONST | EXF_PROBCONST)))) {
4481 exFlags |= EXF_PROBCONST;
4485 exp->ex_Flags |= EXF_RESOLVED;
4491 * Helper, visibility must be properly set immediately, prior to any
4492 * circularity, to guarantee that search functions work without deferral.
/*
 * resvis_set() - publish a resolved visibility value through a resvis_t.
 *
 * Stores 'visibility' through vis->visp.  NOTE(review): several callers
 * pass vis == NULL (e.g. ResolveType internal recursion), so a NULL
 * guard or list-walk presumably exists in the elided portion of this
 * body -- confirm before relying on it.
 */
4496 resvis_set(resvis_t *vis, int visibility)
4499 *vis->visp = visibility;
4505 * ResolveType() - Resolve a type (always returns its argument)
4507 * Resolve a type. Always returns consistent visibility information to the
4508 * caller, even if the resolution remains in-progress. Thus all
4509 * modifications to the resvis chain occurs on the front-end of any
4512 * Flags, Size and Alignment information might take several passes for
4513 * classes (due to chains of DF_DYNAMICREF'd processes), or arrays (due to
4514 * the * array size not being immediately resolvable).
/*
 * ResolveType() - resolve a Type in place (see block comment above).
 *
 * Dispatches on type->ty_Op to fill in ty_Bytes, ty_AlignMask, ty_Flags
 * and ty_Visibility, recursing into component types and SemGroups.
 * Re-entrancy is guarded with TF_RESOLVING; callers that hit an
 * in-progress type get the (already published) visibility only.
 * NOTE(review): the case labels and several closing braces of the
 * switch are elided from this view; the per-op commentary below is
 * keyed off the field unions actually touched.
 */
4517 ResolveType(Type *type, resvis_t *vis, int retry)
4519 SemGroup *sg = NULL;
/* Default the local visibility sink to a dummy so it is always writable. */
4525 myvis.visp = &dummy_vis;
4528 * Detect circular loop.
4530 if (type->ty_Flags & TF_RESOLVED) {
4531 resvis_set(vis, type->ty_Visibility);
4534 if (type->ty_Flags & TF_RESOLVING) {
4536 resvis_set(vis, type->ty_Visibility);
4540 type->ty_Flags |= TF_RESOLVING;
4543 * Remember that visibility data must be set at the head of any recursion
4548 switch (type->ty_Op) {
/* --- TY_CLASS --- */
4551 * NOTE: Special case, PointerType and ReferenceType fields not in
4552 * classes XXX (force alignment and bytes)?
4554 dassert(type->ty_SQList ==
4555 &type->ty_ClassType.et_SemGroup->sg_ClassList);
4557 /* visibility already determined by resolveUnresClass? */
4558 dassert(type->ty_Visibility != 0);
4559 resvis_set(vis, type->ty_Visibility);
4562 * The superclass (if any) cannot depend on our subclass, so resolve
4563 * it first. Note that resolveUnresClass() does not do everything
4564 * because it has to be called in the ResolveClasses() stage, so
4565 * finish it up here with a real resolve.
4567 if (type->ty_ClassType.et_Super) {
4568 Type **superp = &type->ty_ClassType.et_Super;
4569 if ((*superp)->ty_Op == TY_UNRESOLVED)
4570 resolveUnresClass(*superp);
4571 ResolveType(*superp, NULL, 0);
4575 * DEPENDENCY - SG must resolve for us to resolve. (if we can't
4576 * resolve this it is likely an embedded object loop).
4578 sg = type->ty_ClassType.et_SemGroup;
4579 ResolveSemGroup(sg, 0);
4580 if (sg->sg_Flags & SGF_RESOLVED) {
4581 if (type != &PointerType && type != &ReferenceType) {
4582 type->ty_Bytes = sg->sg_Bytes;
4583 type->ty_AlignMask = sg->sg_AlignMask;
4590 * Fixup type ty_SQFlags here XXX removed Any hard class type must be
4591 * given the SF_HARD storage qualifier.
4593 if (sg->sg_Stmt->u.ClassStmt.es_Decl->d_ScopeFlags & SCOPE_HARD)
4594 type->ty_SQFlags |= SF_HARD;
/* --- raw (C-style) pointer --- */
4599 * NOTE: Do not set TF_HASLVREF, C pointers are not tracked.
4600 * We do set TF_HASPTR to indicate that the type is or
4601 * contains a pointer.
4603 * Always complete, even if the target type is incomplete. (allow
4604 * circular references).
4606 type->ty_Bytes = sizeof(void *);
4607 type->ty_AlignMask = RAWPTR_ALIGN;
4608 type->ty_Flags |= TF_HASPTR;
4609 myvis.visp = &type->ty_Visibility;
4610 ResolveType(type->ty_RawPtrType.et_Type, &myvis, 0);
/* --- tracked reference --- */
4615 * Set TF_HASLVREF, references are tracked.
4617 * Always complete, even if the target type is incomplete. (allow
4618 * circular references).
4620 type->ty_Bytes = sizeof(ReferenceStor);
4621 type->ty_AlignMask = REFERENCESTOR_ALIGNMASK;
4622 type->ty_Flags |= TF_HASLVREF;
4623 myvis.visp = &type->ty_Visibility;
4624 ResolveType(type->ty_RefType.et_Type, &myvis, 0);
/* --- array --- */
4629 * Inherit TF_HASLVREF and TF_HASPTR (if array type is or contains
4630 * something which needs to be tracked or checked).
4632 * The array size must resolve sufficiently for us to resolve.
4638 if (type->ty_AryType.et_OrigArySizeExp) {
4639 type->ty_AryType.et_ArySizeExp =
4640 DupExp(NULL, type->ty_AryType.et_OrigArySizeExp);
4642 exp = type->ty_AryType.et_ArySizeExp;
4643 atype = type->ty_AryType.et_Type;
4645 myvis.visp = &type->ty_Visibility;
4646 ResolveType(atype, &myvis, 0);
/* The element count must be a resolvable constant expression. */
4647 exp = resolveConstExp(NULL, type->ty_AryType.et_SemGroup, exp, 0);
4649 if ((exp->ex_Flags & EXF_RESOLVED) &&
4650 (atype->ty_Flags & TF_RESOLVED))
4652 type->ty_AryType.et_ArySizeExp = exp;
4653 type->ty_AryType.et_Count = resolveGetConstExpInt64(exp);
4654 type->ty_AlignMask = type->ty_AryType.et_Type->ty_AlignMask;
4655 type->ty_Bytes = type->ty_AryType.et_Type->ty_Bytes *
4656 type->ty_AryType.et_Count;
4657 type->ty_Flags |= type->ty_AryType.et_Type->ty_Flags &
4658 (TF_HASLVREF | TF_HASPTR |
4659 TF_HASCONSTRUCT | TF_HASDESTRUCT |
4660 TF_HASGCONSTRUCT | TF_HASGDESTRUCT |
/* --- compound --- */
4668 * All elements of a compound type must resolve for the compound type
4671 * NOTE: TF_HASLVREF and TF_HASPTR inherited as appropriate
4674 sg = type->ty_CompType.et_SemGroup;
4675 ResolveSemGroup(sg, 0);
4676 if (sg->sg_Flags & SGF_RESOLVED) {
4677 type->ty_Bytes = sg->sg_Bytes;
4678 type->ty_AlignMask = sg->sg_AlignMask;
4679 type->ty_Visibility = SCOPE_ALL_VISIBLE;
/* --- var-args bundle --- */
4685 * All elements of a compound type must resolve for the compound type
4688 * NOTE: TF_HASLVREF and TF_HASPTR inherited as appropriate
4691 sg = type->ty_VarType.et_SemGroup;
4692 ResolveSemGroup(sg, 0);
4693 if (sg->sg_Flags & SGF_RESOLVED) {
4694 type->ty_Bytes = sg->sg_Bytes;
4695 type->ty_AlignMask = sg->sg_AlignMask;
4696 type->ty_Visibility = SCOPE_ALL_VISIBLE;
/* --- procedure argument list --- */
4702 * All elements of a compound type must resolve for the compound type
4705 * NOTE: TF_HASLVREF and TF_HASPTR inherited as appropriate
4708 sg = type->ty_ArgsType.et_SemGroup;
4709 ResolveSemGroup(sg, 0);
4710 if (sg->sg_Flags & SGF_RESOLVED) {
4711 type->ty_Bytes = sg->sg_Bytes;
4712 type->ty_AlignMask = sg->sg_AlignMask;
4713 type->ty_Visibility = SCOPE_ALL_VISIBLE;
/* --- procedure type --- */
4719 * We mark the type as resolved regardless of the state of the
4720 * underlying argument and return types.
4722 * NOTE: Storage not tracked.
4725 type->ty_AlignMask = 0;
4726 type->ty_Visibility = SCOPE_ALL_VISIBLE;
4727 resvis_set(vis, type->ty_Visibility);
4728 ResolveType(type->ty_ProcType.et_ArgsType, NULL, 0);
4729 ResolveType(type->ty_ProcType.et_RetType, NULL, 0);
/* --- raw storage --- */
4734 * Raw storage must always resolve.
4736 * NOTE: Base storage is not tracked.
4738 type->ty_Bytes = type->ty_StorType.et_Bytes;
4739 /* XXX check pwr of 2 */
/* AlignMask assumes ty_Bytes is a power of two (see XXX above). */
4741 type->ty_AlignMask = type->ty_Bytes - 1;
4742 type->ty_Visibility = SCOPE_ALL_VISIBLE;
4743 resvis_set(vis, type->ty_Visibility);
/* --- TY_UNRESOLVED: convert then retry the switch --- */
4748 * We loop until the type is no longer TY_UNRESOLVED.
4750 * NOTE: resolveUnresClass() is not really a recursive function so we
4751 * don't have to pre-set visibility.
4753 resolveUnresClass(type);
4754 /* visibility set by resolveUnresClass() */
4755 goto loop_unresolved;
/* --- dynamic --- */
4759 * A Dynamic type is basically unknown at compile-time. Always
4762 * NOTE: Tracking unknown (must be handled at run-time).
4764 type->ty_Visibility = SCOPE_ALL_VISIBLE;
4765 resvis_set(vis, type->ty_Visibility);
/* --- import wrapper --- */
4770 * TY_IMPORT types cannot be directly referenced by the program. They
4771 * are implicitly used as a placeholder for a module's global storage
4774 * NOTE: Storage is persistent, so wrapper is not tracked.
4776 sg = type->ty_ImportType.et_SemGroup;
4777 ResolveSemGroup(sg, 0);
4778 type->ty_Visibility = SCOPE_ALL_VISIBLE; /* XXX */
4779 resvis_set(vis, type->ty_Visibility);
4783 dpanic("Unknown type %d (type=%p)", type->ty_Op, type);
/* Completion path: clear RESOLVING, mark RESOLVED, inherit SG flags. */
4788 type->ty_Flags &= ~TF_RESOLVING;
4789 type->ty_Flags |= TF_RESOLVED;
4791 if (sg->sg_Flags & SGF_ISINTEGER)
4792 type->ty_Flags |= TF_ISINTEGER;
4793 if (sg->sg_Flags & SGF_ISUNSIGNED)
4794 type->ty_Flags |= TF_ISUNSIGNED;
4795 if (sg->sg_Flags & SGF_ISFLOATING)
4796 type->ty_Flags |= TF_ISFLOATING;
4797 if (sg->sg_Flags & SGF_ISBOOL)
4798 type->ty_Flags |= TF_ISBOOL;
4799 if (sg->sg_Flags & SGF_HASASS)
4800 type->ty_Flags |= TF_HASASS;
4801 if (sg->sg_Flags & SGF_HASPTR)
4802 type->ty_Flags |= TF_HASPTR;
4804 type->ty_Flags |= TF_HASLVREF;
4805 /* XXX TF_VARARGS */
4806 if (sg->sg_Flags & SGF_VARARGS)
4807 type->ty_Flags |= TF_HASLVREF;
4809 type->ty_Flags |= TF_HASCONSTRUCT;
4811 type->ty_Flags |= TF_HASDESTRUCT;
4813 * Combine constructor/destructor hint flags for globals because
4814 * we have just one linked list for global constructors and
4815 * destructors (no need to optimize heavily).
4818 type->ty_Flags |= TF_HASGCONSTRUCT | TF_HASGDESTRUCT;
4819 dassert(type->ty_Visibility != 0);
4823 * NOTE: visibility is always set prior to any deferral or
4830 * Resolve the default expression for the type, if any. We do not
4831 * require the expression to complete.
4833 * XXX qualified types just copy the exp. bad bad YYY
4835 * YYY ResolveExp() no ISG (import sem group)
4837 if (type->ty_OrigAssExp) {
4838 type->ty_Flags |= TF_HASASS;
4839 type->ty_AssExp = DupExp(sg, type->ty_OrigAssExp);
4840 type->ty_AssExp = ResolveExp(NULL, sg, type->ty_AssExp,
4846 * ty_DynamicVector is nominally used when a Rune binary is run, but we
4847 * also need to set up enough of it such that mixed interpretation and
4848 * execution, or even just straight interpretation, works. This is
4849 * because the interpreter calls into libruntime.
4851 type->ty_DynamicVector = DefaultDynamicVector;
4854 * NOTE: Cannot resolve type alignment here, it must be done in a
4855 * separate pass due to dependencies.
4861 * resolveUnresClass() - resolve an unresolved dotted id sequence into a
4864 * Unresolved type identifier sequences must be resolved. We are also
4865 * responsible for setting the visibility of the type's elements.
/*
 * resolveUnresClass() - resolve a TY_UNRESOLVED dotted-id type in place.
 *
 * Looks the dotted identifier up via FindDeclPath() and rewrites 'super'
 * itself (rather than allocating a new Type) so every shared reference
 * to the same unresolved path is resolved at once.  The declaration may
 * name a class (super becomes TY_CLASS and is moved onto the target
 * SemGroup's class list) or a typedef (super is re-qualified from the
 * typedef's type).  Anything else is a fatal error.  Storage size and
 * alignment are deliberately NOT computed here -- see comment below.
 */
4868 resolveUnresClass(Type *super)
4873 int visibility = SCOPE_ALL_VISIBLE;
4876 dassert_type(super, super->ty_Op == TY_UNRESOLVED);
4878 dottedId = super->ty_UnresType.et_DottedId;
4879 sg = super->ty_UnresType.et_SemGroup;
4881 d = FindDeclPath(NULL, super->ty_UnresType.et_ImportSemGroup,
4883 dottedId, FDC_NULL, &visibility, -1, &eno);
/* Lookup failure is fatal: report the dotted path and assert. */
4885 errorDottedId(dottedId, "Unable to resolve class");
4886 dassert_type(super, 0);
4890 * Resolve the unresolved type. Note that this occurs during class
4891 * resolution and we can't call ResolveType() here without getting into a
4892 * loop, so we do not yet know storage requirements (ty_Bytes and
4897 sg = d->d_ClassDecl.ed_SemGroup;
4899 TypeToQualType(sg->sg_ClassType, super, super->ty_OrigAssExp,
4900 super->ty_SQFlags | sg->sg_ClassType->ty_SQFlags,
4904 sg = d->d_ClassDecl.ed_SemGroup;
4905 super->ty_Op = TY_CLASS;
4906 super->ty_ClassType.et_SemGroup = sg;
4907 super->ty_ClassType.et_Super = d->d_ClassDecl.ed_Super;
4908 super->ty_Visibility = visibility;
4909 super->ty_SQFlags = sg->sg_ClassType->ty_SQFlags;
/* Re-home the type onto the resolved class's SQ list. */
4910 if (super->ty_SQList)
4911 RUNE_REMOVE(super->ty_SQList, super, ty_Node);
4912 super->ty_SQList = &sg->sg_ClassList;
4913 RUNE_INSERT_TAIL(super->ty_SQList, super, ty_Node);
4915 dassert(visibility);
4916 /* can't resolve super here */
4918 * XXX should we move the class from the unresolved list to the new
4919 * SemGroup's actual list?
/* Typedef branch follows. */
4924 * Adjust super instead of allocating a new super, so all other
4925 * references to super using this class path get resolved too.
4927 * XXX which AssExp do we use ?
4929 dassert_type(super, d->d_TypedefDecl.ed_Type != super);
4930 TypeToQualType(d->d_TypedefDecl.ed_Type,
4931 super, super->ty_AssExp,
4932 super->ty_SQFlags | d->d_TypedefDecl.ed_Type->ty_SQFlags,
4934 /* can't resolve super here */
4937 errorDottedId(dottedId, "identifier is not a class or typedef");
4938 dassert_type(super, 0);
4943 * Resolve the declarations in a non-stack semantic group. The sg is being
4944 * referenced by someone, who resolves it with this. This may take multiple
4947 * - Resolve all real storage elements, referenced or not, so the structure
4948 * has a consistent size. Size and Alignment becomes valid when primarily
4949 * resolution via SGF_RESOLVED / SGF_GRESOLVED completes.
4951 * - Most procedures are only resolved on-demand and are not resolved here.
4952 * However, access to the SG implies that all constructors and destructors
4953 * must be active, so we resolve those.
4955 * - We must also resolve any DF_DYNAMICREF'd procedures, which are dynamic
4956 * method calls in sub-classes. The flag is set on the method in the
4957 * subclass when a method call is made in any super-class.
4959 * (Any newly added DF_DYNAMICREF'd procedures will be resolved by the code
4960 * setting the flag if it finds that the SG is undergoing resolution or
4961 * already resolved).
4963 * - We supply a dynamic index for all procedures, whether they are
4964 * referenced or not, and leave the index NULL if they are not. This allows
4965 * us to resolve the indices & extent of the dynamic index array even if late
4966 * procedures are added.
4968 * NOTE! This code does not resolve declarations related to executable
4969 * semantic groups, such as sub-blocks within a procedure, but it does have
4970 * to resolve procedure definitions found in Class's and such.
4972 * NOTE! This code handles the last stage of subclass refinement, by checking
4973 * the validity of the refinement and setting sg_Compat properly.
4977 ResolveSemGroup(SemGroup *sg, int retry)
4984 if ((sg->sg_Flags & (SGF_RESOLVED | SGF_GRESOLVED)) ==
4985 (SGF_RESOLVED | SGF_GRESOLVED))
4989 if (sg->sg_Flags & (SGF_RESOLVING | SGF_GRESOLVING)) {
4994 if (sg->sg_Flags & SGF_RESOLVED)
4996 sg->sg_Flags |= SGF_RESOLVING;
5001 * index 0 - reserved for dynamic initialization index 1 - reserved for
5002 * dynamic destructor
5007 * SECTION1 - INSTANTIATED OBJECT RESOLUTION & PROCEDURE RESOLUTION
5009 * Handle SCOPE_REFINE and DF_DYNAMICREF flagging. We resolve non-global
5010 * elements with real storage.
5012 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
5014 * DF_DYNAMICREF requires that the declaration be resolved because it
5015 * might be used in a dynamic method call, even if it was not
5016 * directly referenced. So if the SemGroup (i.e. class) is
5017 * referenced at all, so to must the method.
5019 if (d->d_Flags & DF_DYNAMICREF) {
5020 if ((d->d_Flags & (DF_RESOLVED | DF_RESOLVING)) == 0) {
5026 * Process all procedures and any non-global instantiated storage.
5036 * Assign the dynamic index. There may be multiple entries for
5037 * the same d_Id, they are ordered such that refinements use the
5038 * same DynIndex as in the superclass which is what allows
5039 * dynamic method calls to work properly. All non-refined
5040 * subclass elements are ordered after all refined/non=refined
5041 * superclass elements (replacing the superclass element and
5042 * using the same DynIndex when refined).
5044 * We must assign d_DynIndex regardless of whether the procedure
5045 * is used or not to guarantee a consistent index between
5046 * super-class and sub-class.
5048 if ((d->d_ScopeFlags & SCOPE_INTERNAL) == 0 &&
5049 (d->d_ProcDecl.ed_Type->ty_SQFlags & (SF_METHOD | SF_GMETHOD)))
5051 d->d_DynIndex = dyncount;
5056 * Only process referenced procedures, plus any that were flagged
5057 * (see above), plus any constructors or destructors.
5059 if ((d->d_Flags & (DF_RESOLVED | DF_RESOLVING)) == 0) {
5060 if (d->d_ScopeFlags & (SCOPE_CONSTRUCTOR |
5061 SCOPE_DESTRUCTOR)) {
5065 if ((d->d_Flags & (DF_RESOLVED | DF_RESOLVING)) == 0)
5068 if (d->d_ScopeFlags & SCOPE_GLOBAL) {
5069 if ((d->d_Flags & DF_ONGLIST) == 0 &&
5070 (d->d_ScopeFlags & (SCOPE_CONSTRUCTOR |
5071 SCOPE_DESTRUCTOR))) {
5072 d->d_GNext = d->d_MyGroup->sg_GBase;
5073 d->d_MyGroup->sg_GBase = d;
5074 d->d_Flags |= DF_ONGLIST;
5075 sg->sg_Flags |= SGF_GABICALL;
5078 if ((d->d_Flags & DF_ONCLIST) == 0 &&
5079 (d->d_ScopeFlags & SCOPE_CONSTRUCTOR)) {
5080 d->d_CNext = d->d_MyGroup->sg_CBase;
5081 d->d_MyGroup->sg_CBase = d;
5082 d->d_Flags |= DF_ONCLIST;
5083 sg->sg_Flags |= SGF_ABICALL;
5085 if ((d->d_Flags & DF_ONDLIST) == 0 &&
5086 (d->d_ScopeFlags & SCOPE_DESTRUCTOR)) {
5087 d->d_DNext = d->d_MyGroup->sg_DBase;
5088 d->d_MyGroup->sg_DBase = d;
5089 d->d_Flags |= DF_ONDLIST;
5090 sg->sg_Flags |= SGF_ABICALL;
5094 case DOP_STACK_STORAGE:
5096 * can't happen. Stack storage is only used in executable
5097 * contexts. The SGs for executable contexts are not handled
5098 * by ResolveSemGroup()
5102 case DOP_ARGS_STORAGE:
5103 case DOP_GROUP_STORAGE:
5105 * Stop if the resolver looped, caller may try later
5108 if ((d->d_Flags & DF_RESOLVED) == 0) {
5114 * Update SG size, alignment, set d_Offset and d_Storage within
5117 if (sg->sg_AlignMask < d->d_AlignMask)
5118 sg->sg_AlignMask = d->d_AlignMask;
5119 sg->sg_Bytes = BASEALIGN(sg->sg_Bytes, d->d_AlignMask);
5120 d->d_Offset = sg->sg_Bytes;
5123 * Set d_Storage based on scope and intended default for d_Op.
5125 sg->sg_Bytes += d->d_Bytes;
5127 type = d->d_StorDecl.ed_Type;
5128 if (d->d_StorDecl.ed_OrigAssExp)
5129 sg->sg_Flags |= SGF_HASASS;
5130 if (type->ty_Flags & TF_HASASS)
5131 sg->sg_Flags |= SGF_HASASS;
5132 if (type->ty_Flags & TF_HASLVREF)
5133 sg->sg_Flags |= SGF_HASLVREF;
5134 if (type->ty_Flags & TF_HASPTR)
5135 sg->sg_Flags |= SGF_HASPTR;
5136 if (type->ty_Flags & TF_HASCONSTRUCT)
5137 sg->sg_Flags |= SGF_ABICALL;
5138 if (type->ty_Flags & TF_HASDESTRUCT)
5139 sg->sg_Flags |= SGF_ABICALL;
5140 if (type->ty_Flags & TF_HASGCONSTRUCT)
5141 sg->sg_Flags |= SGF_GABICALL;
5142 if (type->ty_Flags & TF_HASGDESTRUCT)
5143 sg->sg_Flags |= SGF_GABICALL;
5144 checkUnrestrictedType(d, type);
5146 case DOP_GLOBAL_STORAGE:
5147 /* handled in pass2 */
5150 dpanic_sg(sg, 0, "Unknown d->d_Op %d", d->d_Op);
5155 * Finish up any refinements. (Effects 'ok'? no for now)
5157 if (d->d_ScopeFlags & SCOPE_REFINE) {
5158 if (d->d_Flags & (DF_RESOLVING | DF_RESOLVED)) {
5159 ResolveDecl(d->d_Super, 0);
5161 RefineDeclaration(sg, d->d_Super, d);
5168 sg->sg_Bytes = BASEALIGN(sg->sg_Bytes, sg->sg_AlignMask);
5169 sg->sg_Flags &= ~SGF_RESOLVING;
5170 sg->sg_Flags |= SGF_RESOLVED;
5173 * If no dynamic methods and no dynamic initialization or destruction
5174 * required, set dyncount to 0.
5176 if (dyncount == 2 &&
5177 (sg->sg_Flags & SGF_HASASS) == 0 &&
5178 sg->sg_SRBase == NULL &&
5179 sg->sg_CBase == NULL &&
5180 sg->sg_DBase == NULL) {
5183 sg->sg_DynCount = dyncount;
5184 sg->sg_Flags &= ~SGF_RESOLVING;
5188 * SECTION2 - GLOBAL RESOLUTION
5191 if (sg->sg_Flags & SGF_GRESOLVED)
5193 sg->sg_Flags |= SGF_GRESOLVING;
5194 sg->sg_GlobalBytes = 0;
5197 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
5205 case DOP_STACK_STORAGE:
5207 * can't happen. Stack storage is only used in executable
5211 case DOP_ARGS_STORAGE:
5212 case DOP_GROUP_STORAGE:
5214 * Non-globals were handled in section1
5217 case DOP_GLOBAL_STORAGE:
5219 * Global storage is handled in section2
5221 * NOTE: We only process referenced global storage. This will
5222 * include global elements referenced by constructors, which are
5223 * always run even if not specifically referenced.
5226 if ((d->d_Flags & (DF_RESOLVING | DF_RESOLVED)) == 0)
5229 if ((d->d_Flags & DF_RESOLVED) == 0) {
5234 if (sg->sg_GlobalAlignMask < d->d_AlignMask)
5235 sg->sg_GlobalAlignMask = d->d_AlignMask;
5236 sg->sg_GlobalBytes = (sg->sg_GlobalBytes + d->d_AlignMask) &
5238 d->d_Offset = sg->sg_GlobalBytes;
5239 sg->sg_GlobalBytes += d->d_Bytes;
5240 if (d->d_StorDecl.ed_OrigAssExp)
5241 sg->sg_Flags |= SGF_GHASASS;
5243 type = d->d_StorDecl.ed_Type;
5244 if (type->ty_Flags & TF_HASASS)
5245 sg->sg_Flags |= SGF_GHASASS;
5246 if (type->ty_Flags & TF_HASLVREF)
5247 sg->sg_Flags |= SGF_GHASLVREF;
5248 if (type->ty_Flags & TF_HASPTR)
5249 sg->sg_Flags |= SGF_GHASPTR;
5250 if (type->ty_Flags & TF_HASCONSTRUCT)
5251 sg->sg_Flags |= SGF_ABICALL;
5252 if (type->ty_Flags & TF_HASDESTRUCT)
5253 sg->sg_Flags |= SGF_ABICALL;
5254 if (type->ty_Flags & TF_HASGCONSTRUCT)
5255 sg->sg_Flags |= SGF_ABICALL;
5256 if (type->ty_Flags & TF_HASGDESTRUCT)
5257 sg->sg_Flags |= SGF_ABICALL;
5258 checkUnrestrictedType(d, type);
5261 dpanic_sg(sg, 0, "Unknown d->d_Op %d", d->d_Op);
5266 * Finish up any refinements. (Effects 'ok'? no for now)
5268 if (d->d_ScopeFlags & SCOPE_REFINE) {
5269 if (d->d_Flags & (DF_RESOLVING | DF_RESOLVED)) {
5270 ResolveDecl(d->d_Super, 0);
5272 RefineDeclaration(sg, d->d_Super, d);
5278 * Structures may not directly or indirectly contain pointers
5279 * or references unless they are internal. XXX
5281 * For now allow pointers but not references.
5283 if (sg->sg_Op == SG_CLASS &&
5284 (sg->sg_ClassType->ty_SQFlags & (SF_STRUCT | SF_INTERNAL)) == SF_STRUCT)
5286 if (sg->sg_Flags & (SGF_HASLVREF /* | SGF_HASPTR*/)) {
5287 dfatal_sg(sg, TOK_ERR_STRUCT_CONTENT, NULL);
5295 sg->sg_GlobalBytes = (sg->sg_GlobalBytes + sg->sg_GlobalAlignMask) &
5296 ~sg->sg_GlobalAlignMask;
5297 sg->sg_Flags &= ~SGF_GRESOLVING;
5298 sg->sg_Flags |= SGF_GRESOLVED;
5302 * SECTION3 - Final rollup (future)
5306 if ((sg->sg_Flags & (SGF_RESOLVED | SGF_GRESOLVED)) !=
5307 (SGF_RESOLVED | SGF_GRESOLVED))
5313 * This gets hit if Int32Type is resolved before its class.
5314 * This is a big no-no.
5316 if (sg == Int32Type.ty_ClassType.et_SemGroup &&
5319 dpanic("Resolver improperly early-resolved Int32Type\n");
5325 * findExpOper() - Find operator declaration matching expression
5327 * Locate the operator declaration (a DOP_PROCDEF) that matches the
5328 * expression or NULL if no match could be found. The expression's left and
5329 * right hand sides must already be resolved.
5331 * NOTE! A temporary 'copy' Exp may be passed, not all fields are valid.
5333 static Declaration *testIConstantForType(Declaration *d, Type *type, Exp *exp);
5334 static Declaration *testFConstantForType(Declaration *d, Type *type, Exp *exp);
/*
 * findExpOper() - locate an operator declaration matching an already
 * resolved unary or binary expression (see the block comment above).
 * Direct lookups on the lhs/rhs types are tried first, then constant
 * coercion retries.
 */
5338 findExpOper(Exp *exp, int flags)
/* auto-casting is never applied while matching operators */
5344 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
/*
 * Collect operand types; unary expressions only have a left-hand side
 * (rtype's initialization for that path is not visible here -- TODO
 * confirm it is NULL so the unary findOper() call below is well-formed).
 */
5346 if (exp->ex_Flags & EXF_BINARY) {
5347 rtype = exp->ex_Rhs->ex_Type;
5348 ltype = exp->ex_Lhs->ex_Type;
5350 dassert(exp->ex_Flags & EXF_UNARY);
5352 ltype = exp->ex_Lhs->ex_Type;
5356 * XXX look in our local semantic hierarchy for a compatible operator ?
5360 * Attempt to find a matching operator from the left hand side type.
5362 d = findOper(ltype, exp->ex_Id, ltype, rtype, flags);
/* done if found; a unary op has no right-hand side left to try */
5364 if (d || (exp->ex_Flags & EXF_BINARY) == 0)
5368 * Attempt to find a matching binary operator from the right hand side
5371 d = findOper(rtype, exp->ex_Id, ltype, rtype, flags);
5374 * If that fails but either the left or right-hand sides are constants,
5375 * see if we can find an operator by casting the constant to the
/*
 * rhs integer constant against a non-constant integer lhs: retry with
 * the lhs type on both sides, then verify the constant actually fits
 * (testIConstantForType() warns and rejects the match when it doesn't).
 */
5379 if (exp->ex_Rhs->ex_Token == TOK_INTEGER &&
5380 exp->ex_Lhs->ex_Token != TOK_INTEGER &&
5381 exp->ex_Lhs->ex_Token != TOK_FLOAT &&
5382 (ltype->ty_Flags & TF_ISINTEGER)) {
5383 d = findOper(ltype, exp->ex_Id, ltype, ltype, flags);
5385 d = testIConstantForType(d, ltype, exp->ex_Rhs);
/* mirror case: lhs integer constant against an integer rhs type */
5386 } else if (exp->ex_Lhs->ex_Token == TOK_INTEGER &&
5387 exp->ex_Rhs->ex_Token != TOK_INTEGER &&
5388 exp->ex_Rhs->ex_Token != TOK_FLOAT &&
5389 (rtype->ty_Flags & TF_ISINTEGER)) {
5390 d = findOper(rtype, exp->ex_Id, rtype, rtype, flags);
5392 d = testIConstantForType(d, rtype, exp->ex_Lhs);
/* floating-point constant variants of the same two retries */
5393 } else if (exp->ex_Rhs->ex_Token == TOK_FLOAT &&
5394 exp->ex_Lhs->ex_Token != TOK_INTEGER &&
5395 exp->ex_Lhs->ex_Token != TOK_FLOAT &&
5396 (ltype->ty_Flags & TF_ISFLOATING)) {
5397 d = findOper(ltype, exp->ex_Id, ltype, ltype, flags);
5399 d = testFConstantForType(d, ltype, exp->ex_Rhs);
5400 } else if (exp->ex_Lhs->ex_Token == TOK_FLOAT &&
5401 exp->ex_Rhs->ex_Token != TOK_INTEGER &&
5402 exp->ex_Rhs->ex_Token != TOK_FLOAT &&
5403 (rtype->ty_Flags & TF_ISFLOATING)) {
5404 d = findOper(rtype, exp->ex_Id, rtype, rtype, flags);
5406 d = testFConstantForType(d, rtype, exp->ex_Lhs);
5414 * Calculate whether the constant can be safely cast. If it can, cast the
5415 * constant and return d. Otherwise complain and return NULL.
/*
 * testIConstantForType() - verify the integer constant (exp) fits in
 * (type).  On success the constant is re-typed in place and its run
 * hooks reset; on overflow a warning is issued (per the header comment
 * above the failure path returns NULL so the caller's match is
 * rejected -- the return statements are outside this view).
 */
5419 testIConstantForType(Declaration *d, Type *type, Exp *exp)
5421 int64_t v = resolveGetConstExpInt64(exp);
/* round-trip the value through the target width to detect truncation */
5423 if (type->ty_Flags & TF_ISUNSIGNED) {
5424 switch (type->ty_Bytes) {
5426 if (v != (int64_t) (uint8_t) v)
5430 if (v != (int64_t) (uint16_t) v)
5434 if (v != (int64_t) (uint32_t) v)
5443 switch (type->ty_Bytes) {
5445 if (v != (int64_t) (int8_t) v)
5449 if (v != (int64_t) (int16_t) v)
5453 if (v != (int64_t) (int32_t) v)
5464 * If successful change the constant's type and reset the interpreter to
/* force re-evaluation of the constant under its new type */
5468 exp->ex_Type = type;
5469 exp->ex_Run = RunUnresolvedExp;
5470 exp->ex_Run64 = Run64DefaultExp;
5472 dwarn_exp(exp, TOK_ERR_AUTOCAST_VALUE, NULL);
/*
 * testFConstantForType() - floating-point analogue of
 * testIConstantForType(): verify the constant survives a round-trip
 * through the narrower float width, then re-type it in place.
 */
5479 testFConstantForType(Declaration *d, Type *type, Exp *exp)
5481 float128_t v = resolveGetConstExpFloat128(exp);
/* round-trip through the target precision to detect loss */
5483 switch (type->ty_Bytes) {
5485 if (v != (float32_t) v)
5489 if (v != (float64_t) v)
5497 * If successful change the constant's type and reset the interpreter to
/* force re-evaluation of the constant under its new type */
5501 exp->ex_Type = type;
5502 exp->ex_Run = RunUnresolvedExp;
5503 exp->ex_Run64 = Run64DefaultExp;
5505 dwarn_exp(exp, TOK_ERR_AUTOCAST_VALUE, NULL);
/*
 * findOper() - search (btype)'s SemGroup for an operator declaration
 * with identifier (id) whose signature matches (ltype, rtype), then
 * recurse into the elements of a compound base type.  rtype == NULL
 * selects a unary (1-argument) operator.
 */
5512 findOper(Type *btype, runeid_t id, Type *ltype, Type *rtype, int flags)
5516 int args = (rtype != NULL) ? 2 : 1;
5518 flags &= ~RESOLVE_AUTOCAST; /* not applicable to this function */
5521 * Locate the base type. If the base type does not have a SemGroup there
5522 * are no operators. (XXX put system operators here)
5524 sg = BaseType(&btype);
5530 * Look for the operator in the SemGroup
5532 * TODO - For reasons currently unknown, complex internal operators
5533 * in the Pointer and Reference class (and probably others)
5534 * are not able to completely match if we do not pre-resolve
5535 * all procedural declarations before looking for matches.
5536 * It is unclear why this is the case.
/* work-around per the TODO above: pre-resolve this group's proc decls */
5539 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
5540 if (d->d_MyGroup == sg && d->d_Op == DOP_PROC) {
/* walk the operator-id chain looking for an exact type match */
5545 for (d = FindOperId(sg, id, args); d; d = d->d_ONext) {
5547 if (d->d_MyGroup == sg &&
5548 d->d_Op == DOP_PROC &&
5549 d->d_ProcDecl.ed_OperId == id &&
5550 MatchOperatorTypes(d, ltype, rtype))
5557 * Failed. If the base type is a compound type, look for the operator in
5558 * the SemGroup for each element making up the compound type. e.g. so
5559 * (mycustomtype, double) would find the operator in mycustomtype.
5561 if (btype->ty_Op == TY_COMPOUND) {
5562 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
5564 if (d->d_Op & DOPF_STORAGE) {
5565 d2 = findOper(d->d_StorDecl.ed_Type, id,
5566 ltype, rtype, flags);
5567 } else if (d->d_Op == DOP_TYPEDEF) {
5568 d2 = findOper(d->d_TypedefDecl.ed_Type, id,
5569 ltype, rtype, flags);
/*
 * errorDottedId() - print a printf-style message to stderr followed by
 * ": a.b.c" for the dotted identifier path in (ary), then a newline.
 * (ary) is terminated by a zero entry (see the loop condition below).
 */
5581 errorDottedId(runeid_t *ary, const char *ctl,...)
5583 char buf[RUNE_IDTOSTR_LEN];
5588 vfprintf(stderr, ctl, va);
5590 fprintf(stderr, ": %s", runeid_text(ary[0], buf));
5591 for (i = 1; ary[i]; ++i)
5592 fprintf(stderr, ".%s", runeid_text(ary[i], buf));
5593 fprintf(stderr, "\n");
5597 * Resolve the alignment requirements for SemGroups related to statements,
5598 * including the alignment requirements needed for temporary expression
/*
 * ResolveAlignment() - compute alignment requirements for statement
 * (st), its attached expressions/declarations, and (recursively) its
 * substatements, accumulating into the SemGroup's sg_AlignMask,
 * sg_GlobalAlignMask and sg_TmpAlignMask fields.
 */
5603 ResolveAlignment(Stmt *st, int flags)
5605 SemGroup *sg = st->st_MyGroup;
5611 * RESOLVE_CLEAN - If set, RESOLVE_FINALIZE is also always set
5613 * RESOLVE_FINALIZE - Must also resolve indirect dependencies that
5614 * do not directly affect alignment of (type).
/*
 * Per-statement resolver state machine: CLEAN wipes both the align and
 * storage state bits so a later pass recomputes them; otherwise bail
 * early if the requested level of alignment work was already done.
 */
5616 if (flags & RESOLVE_CLEAN) {
5617 if ((st->st_RState & (RSF_ALIGN | RSF_SUB_ALIGN)) == 0)
5619 st->st_RState &= ~(RSF_ALIGN | RSF_SUB_ALIGN |
5620 RSF_STORAGE | RSF_SUB_STORAGE);
5621 } else if (flags & RESOLVE_FINALIZE) {
5622 if (st->st_RState & RSF_SUB_ALIGN)
5624 st->st_RState |= RSF_ALIGN | RSF_SUB_ALIGN;
5626 if (st->st_RState & RSF_ALIGN)
5628 st->st_RState |= RSF_ALIGN;
5632 * If this is an executable semantic layer or an import layer then assign
5633 * alignment to declarations up-front. Of the various DOP_*_STORAGE ops,
5634 * we should only see DOP_STACK_STORAGE and DOP_GLOBAL_STORAGE.
5636 * Note: if this is the root ST_Import STF_SEMANTIC is *NOT* set and sg
5639 if ((st->st_Flags & STF_SEMANTIC) && st->st_Op != ST_Class) {
5643 * Pre-scan for alignment. Don't try to propagate the alignment to
5644 * the parent for now as that would require recalculating the
5647 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
5649 case DOP_STACK_STORAGE:
5650 case DOP_ARGS_STORAGE:
5651 case DOP_GROUP_STORAGE:
/* local storage widens the group's alignment mask */
5652 if (sg->sg_AlignMask < d->d_AlignMask)
5653 sg->sg_AlignMask = d->d_AlignMask;
5655 case DOP_GLOBAL_STORAGE:
/* global storage tracked separately */
5656 if (sg->sg_GlobalAlignMask < d->d_AlignMask)
5657 sg->sg_GlobalAlignMask = d->d_AlignMask;
/*
 * Per-statement-type handling: fold the alignment of any attached
 * expressions or declarations into sg_TmpAlignMask.
 */
5665 switch (st->st_Op) {
5673 if (st->st_TypedefStmt.es_Decl->d_Flags & DF_RESOLVED) {
5674 resolveDeclAlign(st->st_TypedefStmt.es_Decl,
5675 &sg->sg_TmpAlignMask, flags);
5680 * NOTE: Don't calculate for declarations that belong in a different
5687 d = st->st_DeclStmt.es_Decl;
5689 for (i = 0; i < st->st_DeclStmt.es_DeclCount; ++i) {
5690 if (st->st_MyGroup == d->d_MyGroup &&
5691 (d->d_Flags & DF_RESOLVED))
5693 resolveDeclAlign(d, &sg->sg_TmpAlignMask, flags);
5695 d = RUNE_NEXT(d, d_Node);
/* loops: before-condition, after-condition and step expressions */
5707 if (st->st_LoopStmt.es_BCond) {
5708 resolveExpAlign(st->st_LoopStmt.es_BCond,
5709 &sg->sg_TmpAlignMask, flags);
5711 if (st->st_LoopStmt.es_ACond) {
5712 resolveExpAlign(st->st_LoopStmt.es_ACond,
5713 &sg->sg_TmpAlignMask, flags);
5715 if (st->st_LoopStmt.es_AExp) {
5716 resolveExpAlign(st->st_LoopStmt.es_AExp,
5717 &sg->sg_TmpAlignMask, flags);
5726 resolveExpAlign(st->st_IfStmt.es_Exp, &sg->sg_TmpAlignMask, flags);
5729 if (st->st_RetStmt.es_Exp)
5730 resolveExpAlign(st->st_RetStmt.es_Exp, &sg->sg_TmpAlignMask, flags);
5733 if (st->st_ResStmt.es_Exp)
5734 resolveExpAlign(st->st_ResStmt.es_Exp, &sg->sg_TmpAlignMask, flags);
5738 * The switch expression's temporary data must be saved while we are
5739 * executing the sub-statements (the cases).
5741 resolveExpAlign(st->st_SwStmt.es_Exp, &sg->sg_TmpAlignMask, flags);
5744 if (st->st_CaseStmt.es_Exp)
5745 resolveExpAlign(st->st_CaseStmt.es_Exp,
5746 &sg->sg_TmpAlignMask, flags);
5749 resolveExpAlign(st->st_ExpStmt.es_Exp, &sg->sg_TmpAlignMask, flags);
5751 case ST_ThreadSched:
/* any statement type not handled above is a resolver bug */
5754 dassert_stmt(st, 0);
5758 * Calculate storage requirements for substatements. offset acts as our
5759 * base. We union the storage for the substatements together. Note that
5760 * often scan->sg_MyGroup == sg.
5762 RUNE_FOREACH(scan, &st->st_List, st_Node) {
5763 if (scan->st_Op == ST_Class) {
5764 if (scan->u.ClassStmt.es_Decl->d_Flags & DF_RESOLVED)
5765 ResolveAlignment(scan, flags);
5766 } else if (scan->st_Op == ST_Decl &&
5767 scan->st_DeclStmt.es_Decl->d_MyGroup !=
5773 } else if (scan->st_Op == ST_Decl &&
5774 (scan->st_DeclStmt.es_Decl->d_Flags & DF_RESOLVED)) {
5776 * See prior comments, skip declarations that were moved to
5779 * (already resolved so can use junk offsets)
5781 resolveDeclAlign(scan->st_DeclStmt.es_Decl,
5782 &sg->sg_TmpAlignMask,
5784 } else if (scan->st_Op == ST_Proc &&
5785 scan->st_ProcStmt.es_Decl->d_ProcDecl.ed_OrigBody == scan)
5787 /* Do not resolve template procedures! */
5788 } else if (scan->st_Flags & STF_SEMTOP) {
5789 if (scan->st_Flags & STF_RESOLVED)
5790 ResolveAlignment(scan, flags);
5792 if (scan->st_Flags & STF_RESOLVED)
5793 ResolveAlignment(scan, flags);
5798 * If this is a new semantic level then fully resolve the alignment for
5799 * the SG as a final clean-up (for alignment anyway).
5801 * This will redundantly calculate temporary space requirements.
5803 * Note that for non-Class executable SemGroup's TmpBytes is incorporated
5804 * in a downward fashion while sg_Bytes is incorporated in an upward
5805 * fashion. It can become quite confusing. Don't ask me why I did it
5808 if (st->st_Flags & STF_SEMANTIC) {
5809 resolveSemGroupAlign(sg, flags);
5810 //if ((sg->sg_RState & RSF_SUB_ALIGN) == 0) {
5811 // resolveSemGroupAlign(sg, flags);
5816 * Propagate alignment requirements upward.
/* semantic but not top-level: parent group must be at least as aligned */
5818 if ((st->st_Flags & (STF_SEMANTIC | STF_SEMTOP)) == STF_SEMANTIC) {
5819 if (sg->sg_Parent->sg_AlignMask < sg->sg_AlignMask)
5820 sg->sg_Parent->sg_AlignMask = sg->sg_AlignMask;
5821 if (sg->sg_Parent->sg_TmpAlignMask < sg->sg_TmpAlignMask)
5822 sg->sg_Parent->sg_TmpAlignMask = sg->sg_TmpAlignMask;
5827 * ResolveStorage() - Final storage resolution pass
5829 * This pass carefully scans the SemGroup hierarchy and assigns offsets to
5832 * PROCEDURES - all the various 'executable' semantic layers in a procedure
5833 * are collapsed together for efficiency, so we only have to manage one
5834 * context. This means that the d_Offset assigned to declarations in
5835 * sub-blocks may exceed the sg_ size of the sub-block's SemGroup. We do not
5836 * attempt to resolve procedure body templates (d_ProcDecl.ed_OrigBody).
5838 * CLASSES - are given offsets in their SemGroup's relative to 0, if not
5841 * IMPORTS - are given offsets in their SemGroup's relative to 0
5843 * COMPOUND TYPES - (such as procedure arguments) are given offsets in their
5844 * SemGroup's relative to 0.
5846 * TEMPORARY STORAGE - expressions may require temporary storage for
5847 * intermediate results. That space is reserved here.
5849 * We specifically do not resolve unrelated storage.
/*
 * ResolveStorage() - final storage resolution pass for statement (st):
 * assigns d_Offset values to storage declarations and accumulates
 * temporary-space requirements (sg_TmpBytes / sg_GlobalTmpBytes).
 * See the large block comment above for the per-context rules.
 */
5853 ResolveStorage(Stmt *st, int flags)
5859 SemGroup *sg = st->st_MyGroup;
5864 * State machine for Storage (not used for cleaning)
5866 * RESOLVE_FINALIZE - Must also resolve indirect dependencies that
5867 * do not directly affect storage for (st).
/* alignment must already be resolved at the matching level */
5869 if (flags & RESOLVE_FINALIZE) {
5870 dassert_stmt(st, st->st_RState & RSF_SUB_ALIGN)5870;
5871 if (st->st_RState & RSF_SUB_STORAGE)
5873 st->st_RState |= RSF_STORAGE | RSF_SUB_STORAGE;
5875 dassert_stmt(st, st->st_RState & RSF_ALIGN);
5876 if (st->st_RState & RSF_STORAGE)
5878 st->st_RState |= RSF_STORAGE;
5882 * TODO - pure expressions can be run multiple times
5883 * dassert((st->st_Flags & STF_TMPRES1/2) == 0);
5887 * If this is an executable semantic layer or an import layer then assign
5888 * storage to declarations up-front. Of the various DOP_*_STORAGE ops,
5889 * we should only see DOP_STACK_STORAGE and DOP_GLOBAL_STORAGE.
5891 * Note: If this is the root ST_Import STF_SEMANTIC is *NOT* set and sg
5894 if ((st->st_Flags & STF_SEMANTIC) && st->st_Op != ST_Class) {
5898 * Make sure we aren't stuck in a recursive loop. If the SG
5899 * has already been resolved, assert calculated offsets.
5901 //dassert((sg->sg_Flags & (SGF_FRESOLVED | SGF_FRESOLVING)) == 0);
5902 dassert((sg->sg_Flags & SGF_FRESOLVING) == 0);
5904 sg->sg_Flags |= SGF_FRESOLVING;
5907 * The base offset for sub-semantic-blocks must match the alignment
5908 * they require in order to allow us to do an aligned BZEROing of the
5909 * space. We do not include the temporary space here (it does not
5910 * need to be BZERO'd).
5912 * NOTE: sg_TmpAlignMask is taken into account when the top-level
5913 * frame is allocated.
/* nested blocks start after the parent block's storage, aligned */
5915 if (st->st_Flags & STF_SEMTOP) {
5919 SemGroup *psg = sg->sg_Parent;
5921 base = BASEALIGN(psg->sg_BlkOffset + psg->sg_BlkBytes,
5923 gbase = BASEALIGN(psg->sg_GlobalBlkOffset + psg->sg_GlobalBlkBytes,
5924 sg->sg_GlobalAlignMask);
/* a re-resolve must reproduce the previously assigned offsets */
5927 dassert((sg->sg_Flags & SGF_FRESOLVED) == 0 ||
5928 sg->sg_BlkOffset == base);
5929 sg->sg_BlkOffset = base;
5930 sg->sg_GlobalBlkOffset = gbase;
5933 * Classify storage (note: class decls are handled elsewhere)
5935 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
5937 * Set d_Storage based on scope and intended default for d_Op.
5940 case DOP_STACK_STORAGE:
5941 case DOP_ARGS_STORAGE:
5942 case DOP_GROUP_STORAGE:
/* assign an aligned frame offset from the local base */
5943 type = d->d_StorDecl.ed_Type;
5944 base = BASEALIGN(base, d->d_AlignMask);
5945 dassert((sg->sg_Flags & SGF_FRESOLVED) == 0 ||
5946 (urunesize_t)d->d_Offset == base);
/* propagate content properties of the type up to the SemGroup */
5950 if (d->d_StorDecl.ed_OrigAssExp)
5951 sg->sg_Flags |= SGF_HASASS;
5952 if (type->ty_Flags & TF_HASASS)
5953 sg->sg_Flags |= SGF_HASASS;
5954 if (type->ty_Flags & TF_HASLVREF)
5955 sg->sg_Flags |= SGF_HASLVREF;
5956 if (type->ty_Flags & TF_HASPTR)
5957 sg->sg_Flags |= SGF_HASPTR;
5958 if (type->ty_Flags & TF_HASCONSTRUCT)
5959 sg->sg_Flags |= SGF_ABICALL;
5960 if (type->ty_Flags & TF_HASDESTRUCT)
5961 sg->sg_Flags |= SGF_ABICALL;
5962 if (type->ty_Flags & TF_HASGCONSTRUCT)
5963 sg->sg_Flags |= SGF_ABICALL;
5964 if (type->ty_Flags & TF_HASGDESTRUCT)
5965 sg->sg_Flags |= SGF_ABICALL;
5966 checkUnrestrictedType(d, type);
5968 case DOP_GLOBAL_STORAGE:
/* same as above but against the global base and G* flags */
5969 type = d->d_StorDecl.ed_Type;
5970 gbase = BASEALIGN(gbase, d->d_AlignMask);
5971 dassert((sg->sg_Flags & SGF_FRESOLVED) == 0 ||
5972 (urunesize_t)d->d_Offset == gbase);
5973 d->d_Offset = gbase;
5974 gbase += d->d_Bytes;
5975 if (d->d_StorDecl.ed_OrigAssExp)
5976 sg->sg_Flags |= SGF_GHASASS;
5977 if (type->ty_Flags & TF_HASASS)
5978 sg->sg_Flags |= SGF_GHASASS;
5979 if (type->ty_Flags & TF_HASLVREF)
5980 sg->sg_Flags |= SGF_GHASLVREF;
5981 if (type->ty_Flags & TF_HASPTR)
5982 sg->sg_Flags |= SGF_GHASPTR;
5983 if (type->ty_Flags & TF_HASCONSTRUCT)
5984 sg->sg_Flags |= SGF_ABICALL;
5985 if (type->ty_Flags & TF_HASDESTRUCT)
5986 sg->sg_Flags |= SGF_ABICALL;
5987 if (type->ty_Flags & TF_HASGCONSTRUCT)
5988 sg->sg_Flags |= SGF_ABICALL;
5989 if (type->ty_Flags & TF_HASGDESTRUCT)
5990 sg->sg_Flags |= SGF_ABICALL;
5991 checkUnrestrictedType(d, type);
5999 * The byte size of the block does not have to be aligned, but
6000 * aligning it (within reason) might provide a benefit.
/* commit final sizes; re-resolve must agree with the first pass */
6002 dassert((sg->sg_Flags & SGF_FRESOLVED) == 0 ||
6003 sg->sg_BlkBytes == base - sg->sg_BlkOffset);
6004 sg->sg_Bytes = base;
6005 dassert((sg->sg_Flags & SGF_FRESOLVED) == 0 ||
6006 sg->sg_GlobalBlkBytes == gbase - sg->sg_GlobalBlkOffset);
6007 sg->sg_GlobalBytes = gbase;
6010 dassert((sg->sg_Flags & SGF_FRESOLVED) == 0 ||
6011 sg->sg_BlkBytes == sg->sg_Bytes - sg->sg_BlkOffset);
6012 sg->sg_BlkBytes = base - sg->sg_BlkOffset;
6013 sg->sg_GlobalBlkBytes = gbase - sg->sg_GlobalBlkOffset;
6014 sg->sg_Flags |= SGF_FRESOLVED;
6015 sg->sg_Flags &= ~SGF_FRESOLVING;
6019 * Figure out how much temporary space we need to be able to execute
6020 * statements and expressions. Temporary space, like the main procedural
6021 * space, must be inherited from and consolidated into the top-level
6025 base = sg->sg_TmpBytes;
6026 gbase = sg->sg_GlobalTmpBytes;
6029 * Root ST_Import. avoid compiler warnings
6037 switch (st->st_Op) {
/* imports: look up a "ResolveStorage" hook in dynamically loaded DLLs */
6039 if (st->st_ImportStmt.es_DLL) {
6042 func = dlsym(st->st_ImportStmt.es_DLL, "ResolveStorage");
6051 if (st->st_TypedefStmt.es_Decl->d_Flags & DF_RESOLVED) {
6052 resolveDeclStorage(st->st_TypedefStmt.es_Decl, flags,
6053 base, &limit, gbase, &glimit);
6058 * Temporary space for declarations are handled here.
6060 * Resolve declarations, skipping any whose context was moved to a
6061 * class (e.g. a declaration at the top level of a file like
6062 * Fd.setfd(...) also exists in the Fd class).
6068 d = st->st_DeclStmt.es_Decl;
6070 if (d->d_Op == DOP_GLOBAL_STORAGE)
6071 st->st_DeclStmt.es_TmpOffset = gbase;
6073 st->st_DeclStmt.es_TmpOffset = base;
6074 for (i = 0; i < st->st_DeclStmt.es_DeclCount; ++i) {
6075 if (st->st_MyGroup != d->d_MyGroup) {
6076 /* printf("SKIPB %s\n", d->d_Id); */
6078 * resolveDeclStorage(d, flags,
6079 * base, &limit, gbase, &glimit);
6081 } else if (d->d_Flags & DF_RESOLVED) {
6082 resolveDeclStorage(d, flags, base, &limit, gbase, &glimit);
6084 resolveDeclStorage(d, flags, base, &limit, gbase, &glimit);
6086 d = RUNE_NEXT(d, d_Node);
/* loop condition/step expressions only need storage when finalizing */
6098 if (st->st_LoopStmt.es_BCond && (flags & RESOLVE_FINALIZE)) {
6099 resolveExpStorage(st->st_LoopStmt.es_BCond, flags,
6102 if (st->st_LoopStmt.es_ACond && (flags & RESOLVE_FINALIZE)) {
6103 resolveExpStorage(st->st_LoopStmt.es_ACond, flags,
6106 if (st->st_LoopStmt.es_AExp && (flags & RESOLVE_FINALIZE)) {
6107 resolveExpStorage(st->st_LoopStmt.es_AExp, flags,
6117 if (flags & RESOLVE_FINALIZE)
6118 resolveExpStorage(st->st_IfStmt.es_Exp, flags, base, &limit);
6121 if (st->st_RetStmt.es_Exp && (flags & RESOLVE_FINALIZE))
6122 resolveExpStorage(st->st_RetStmt.es_Exp, flags, base, &limit);
6125 if (st->st_ResStmt.es_Exp && (flags & RESOLVE_FINALIZE))
6126 resolveExpStorage(st->st_ResStmt.es_Exp, flags, base, &limit);
6130 * The switch expression's temporary data must be saved while we are
6131 * executing the sub-statements (the cases).
6133 if (flags & RESOLVE_FINALIZE) {
6134 urunesize_t xlimit = base;
6135 resolveExpStorage(st->st_SwStmt.es_Exp, flags, base, &xlimit);
6142 if (st->st_CaseStmt.es_Exp && (flags & RESOLVE_FINALIZE))
6143 resolveExpStorage(st->st_CaseStmt.es_Exp, flags, base, &limit);
6146 if (flags & RESOLVE_FINALIZE)
6147 resolveExpStorage(st->st_ExpStmt.es_Exp, flags, base, &limit);
6149 case ST_ThreadSched:
6152 dassert_stmt(st, 0);
6156 * Calculate storage requirements for substatements. (base) may have
6157 * been adjusted if this statement level's temporary storage needs to be
6158 * retained (aka switch() expression).
6160 * Note that often scan->sg_MyGroup == sg.
6162 RUNE_FOREACH(scan, &st->st_List, st_Node) {
6163 dassert(scan->st_Op != ST_Proc);
6164 if (scan->st_Op == ST_Class) {
6165 if (scan->u.ClassStmt.es_Decl->d_Flags & DF_RESOLVED)
6166 ResolveStorage(scan, flags);
6167 } else if (scan->st_Op == ST_Decl) {
6169 * Ignore declarations here, they will be handled in the semgroup
6170 * scan in the next loop
6172 } else if (scan->st_Op == ST_Proc) {
6173 /* Do not resolve template procedures! */
6174 if (scan->st_ProcStmt.es_Decl->d_ProcDecl.ed_OrigBody == scan) {
6179 } else if (scan->st_Flags & STF_SEMTOP) {
6180 assert(scan->st_MyGroup != sg);
6181 if (scan->st_Flags & STF_RESOLVED)
6182 ResolveStorage(scan, flags);
6185 * This is a bit of a mess. The baseline sg_TmpBytes needs to be
6186 * set so calculated temporary offsets are relative to it, and
6187 * then restored. Otherwise we might blow away the
6188 * SGF_TMPRESOLVED SemGroup
6192 urunesize_t save_offset;
6193 urunesize_t save_goffset;
6195 if (scan->st_Flags & STF_RESOLVED) {
6196 save_offset = scan->st_MyGroup->sg_TmpBytes;
6197 save_goffset = scan->st_MyGroup->sg_GlobalTmpBytes;
6198 scan->st_MyGroup->sg_TmpBytes = base;
6199 scan->st_MyGroup->sg_GlobalTmpBytes = gbase;
6200 ResolveStorage(scan, flags);
/* restore the saved baselines and fold results into our limits */
6202 if (scan->st_MyGroup->sg_TmpBytes < save_offset)
6203 scan->st_MyGroup->sg_TmpBytes = save_offset;
6204 if (scan->st_MyGroup->sg_GlobalTmpBytes < save_goffset) {
6205 scan->st_MyGroup->sg_GlobalTmpBytes = save_goffset;
6207 if (limit < scan->st_MyGroup->sg_TmpBytes)
6208 limit = scan->st_MyGroup->sg_TmpBytes;
6209 if (glimit < scan->st_MyGroup->sg_GlobalTmpBytes)
6210 glimit = scan->st_MyGroup->sg_GlobalTmpBytes;
6216 * If this is a new semantic level call resolveSemGroupStorage() to do
6217 * the final cleanup of SemGroup issues. This will redundantly calculate
6218 * temporary space requirements. Also, due to type/class references the
6219 * temporary space for a class may have already been resolved. Since a
6220 * class can only contain declarations it had better match what we
6223 * Note that for non-Class executable SemGroup's TmpBytes is incorporated
6224 * in a downward fashion while sg_Bytes is incorporated in an upward
6225 * fashion. It can become quite confusing. Don't ask me why I did it
6228 if (st->st_Flags & STF_SEMANTIC) {
6229 resolveSemGroupStorage(sg, flags, limit, &limit, glimit, &glimit);
6231 sg->sg_TmpBytes = limit;
6232 sg->sg_GlobalTmpBytes = glimit;
6233 } /* else this is the Root st_Import */
/* propagate consolidated byte counts up to the parent SemGroup */
6235 if ((st->st_Flags & (STF_SEMANTIC | STF_SEMTOP)) == STF_SEMANTIC) {
6236 SemGroup *psg = sg->sg_Parent;
6238 if (psg->sg_Bytes < sg->sg_Bytes)
6239 psg->sg_Bytes = sg->sg_Bytes;
6240 if (psg->sg_GlobalBytes < sg->sg_GlobalBytes)
6241 psg->sg_GlobalBytes = sg->sg_GlobalBytes;
6246 * resolveDeclAlign() - Resolve the alignment required to process
6249 * This is an expression tree traversal storage resolution procedure. We have
6250 * to traverse through declarations to get to default assignments and such.
6252 * If a declaration has no assigned default the underlying type may itself
6253 * have an assigned default which must be dealt with.
/*
 * resolveDeclAlign() - accumulate into *expalignp the alignment needed
 * by declaration (d) and, under RESOLVE_FINALIZE, by its indirect
 * dependencies (default assignments, procedure bodies).  See the
 * header comment above for traversal notes.
 */
6256 resolveDeclAlign(Declaration *d, urunesize_t *expalignp, int flags)
6263 * RESOLVE_CLEAN - If set, RESOLVE_FINALIZE is also always set
6265 * RESOLVE_FINALIZE - Must also resolve indirect dependencies that
6266 * do not directly affect alignment of (d).
/*
 * Per-declaration state machine: CLEAN wipes align+storage state;
 * otherwise short-circuit if the work was already done, still folding
 * d_AlignMask into the caller's accumulator.
 */
6268 if (flags & RESOLVE_CLEAN) {
6269 if ((d->d_RState & (RSF_ALIGN | RSF_SUB_ALIGN)) == 0)
6271 d->d_RState &= ~(RSF_ALIGN | RSF_SUB_ALIGN |
6272 RSF_STORAGE | RSF_SUB_STORAGE);
6273 } else if (flags & RESOLVE_FINALIZE) {
6274 if (d->d_RState & RSF_SUB_ALIGN) {
6275 if (*expalignp < d->d_AlignMask)
6276 *expalignp = d->d_AlignMask;
6279 d->d_RState |= RSF_ALIGN | RSF_SUB_ALIGN;
6281 if (d->d_RState & RSF_ALIGN) {
6282 if (*expalignp < d->d_AlignMask)
6283 *expalignp = d->d_AlignMask;
6286 d->d_RState |= RSF_ALIGN;
6291 /* recursion already dealt with */
/* storage declarations: align for the type plus any default assignment */
6293 case DOP_ARGS_STORAGE:
6294 case DOP_STACK_STORAGE:
6295 case DOP_GROUP_STORAGE:
6297 Type *type = d->d_StorDecl.ed_Type;
6299 resolveTypeAlign(type, expalignp, flags);
6300 if (d->d_StorDecl.ed_AssExp) {
6301 resolveExpAlign(d->d_StorDecl.ed_AssExp, expalignp, flags);
6305 case DOP_GLOBAL_STORAGE:
6307 Type *type = d->d_StorDecl.ed_Type;
6309 resolveTypeAlign(type, expalignp, flags);
6310 if (d->d_StorDecl.ed_AssExp) {
6311 resolveExpAlign(d->d_StorDecl.ed_AssExp, expalignp, flags);
6317 * Never try to resolve storage considerations for an alias's
6318 * assignment in the declaration itself. The run-time context
6319 * depends on who and how many other parts of the program reference
6320 * the alias and the expression tree will be duplicated for each.
6324 /* XXX what about ty_AssExp ? should be in global space */
6327 /* recursion already dealt with */
6331 * Resolution of procedure declarations might have been deferred (see
6332 * TOK_ID in ResolveExp()).
6334 /* ResolveDecl(d, 0); */
/* procedures: align the proc type and recurse into the body, if any */
6339 ptype = d->d_ProcDecl.ed_Type;
6340 resolveTypeAlign(ptype, expalignp, flags);
6342 if ((st = d->d_ProcDecl.ed_ProcBody) != NULL) {
6343 ResolveAlignment(st, flags);
6352 * Make sure that the semantic group associated with the declaration
6356 if (sg && (sg->sg_Op == SG_MODULE || sg->sg_Op == SG_CLASS)) {
6357 resolveSemGroupAlign(sg, flags);
/*
 * resolveDynamicDeclAlign() - resolve alignment for (d) and then for
 * every declaration on its d_SubBase/d_SubNext refinement chain,
 * recursing into chain entries that themselves have sub-chains.
 */
6364 resolveDynamicDeclAlign(Declaration *d, urunesize_t *expalignp, int flags)
6368 resolveDeclAlign(d, expalignp, flags);
/* first sweep: the immediate sub-declarations (guard body elided here) */
6370 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
6371 if (scan->d_MyGroup &&
6372 (scan->d_MyGroup->sg_Flags & (SGF_RESOLVING | SGF_RESOLVED)))
6374 resolveDeclAlign(scan, expalignp, flags);
/* second sweep: recurse one level deeper where sub-chains exist */
6377 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
6378 if (scan->d_SubBase)
6379 resolveDynamicDeclAlign(scan, expalignp, flags);
/*
 * resolveDeclStorage() - reserve temporary storage implied by
 * declaration (d).  (base,*limitp) track the local temporary region
 * and (gbase,*glimitp) the global one; the limits are pushed upward to
 * cover whatever this declaration consumes.
 */
6384 resolveDeclStorage(Declaration *d, int flags,
6385 urunesize_t base, urunesize_t *limitp,
6386 urunesize_t gbase, urunesize_t *glimitp)
6391 * sync-up any adjustments to base made by the caller
6395 if (*glimitp < gbase)
6399 * State machine for Storage (not used for cleaning)
6401 * RESOLVE_FINALIZE - Must also resolve indirect dependencies that
6402 * do not directly affect storage for (d).
/* alignment must have been resolved first at the matching level */
6404 if (flags & RESOLVE_FINALIZE) {
6405 dassert_decl(d, d->d_RState & RSF_SUB_ALIGN);
6406 if (d->d_RState & RSF_SUB_STORAGE)
6408 d->d_RState |= RSF_STORAGE | RSF_SUB_STORAGE;
6410 dassert_decl(d, d->d_RState & RSF_ALIGN);
6411 if (d->d_RState & RSF_STORAGE)
6413 d->d_RState |= RSF_STORAGE;
6418 /* recursion already dealt with */
/* local storage: type temporaries plus the default-assignment exp */
6420 case DOP_ARGS_STORAGE:
6421 case DOP_STACK_STORAGE:
6422 case DOP_GROUP_STORAGE:
6424 Type *type = d->d_StorDecl.ed_Type;
6426 resolveTypeStorage(type, 0, base, limitp);
6427 if (d->d_StorDecl.ed_AssExp && (flags & RESOLVE_FINALIZE))
6428 resolveExpStorage(d->d_StorDecl.ed_AssExp, flags, base, limitp);
/* global storage: same idea against the global base/limit */
6431 case DOP_GLOBAL_STORAGE:
6433 Type *type = d->d_StorDecl.ed_Type;
6435 resolveTypeStorage(type, RESOLVE_ISGLOB, gbase, glimitp);
6436 if (d->d_StorDecl.ed_AssExp && (flags & RESOLVE_FINALIZE)) {
6437 resolveExpStorage(d->d_StorDecl.ed_AssExp, flags,
6444 * Never try to resolve storage considerations for an alias's
6445 * assignment in the declaration itself. The run-time context
6446 * depends on who and how many other parts of the program reference
6447 * the alias and the expression tree will be duplicated for each.
6451 /* XXX what about ty_AssExp ? should be in global space */
6454 /* recursion already dealt with */
/* procedures: proc type storage, then the body when finalizing */
6461 ptype = d->d_ProcDecl.ed_Type;
6462 dassert(ptype->ty_Op == TY_PROC);
6463 resolveTypeStorage(ptype, flags, base, limitp);
6465 if ((st = d->d_ProcDecl.ed_ProcBody) != NULL) {
6466 if (flags & RESOLVE_FINALIZE)
6467 ResolveStorage(st, flags);
6476 * Make sure that the semantic group associated with the declaration
6480 if (sg && (sg->sg_Op == SG_MODULE || sg->sg_Op == SG_CLASS)) {
6481 resolveSemGroupStorage(sg, flags, 0, NULL, 0, NULL);
/*
 * resolveDynamicDeclStorage() - resolveDeclStorage() applied to (d)
 * and to its whole d_SubBase/d_SubNext refinement chain, recursing
 * into chain entries that themselves have sub-chains (mirrors
 * resolveDynamicDeclAlign()).
 */
6487 resolveDynamicDeclStorage(Declaration *d, int flags,
6488 urunesize_t base, urunesize_t *limitp,
6489 urunesize_t gbase, urunesize_t *glimitp)
6494 * sync-up any adjustments to base made by the caller
6498 if (*glimitp < gbase)
6501 resolveDeclStorage(d, flags, base, limitp, gbase, glimitp);
/* first sweep: immediate sub-declarations (guard body elided here) */
6503 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
6504 if (scan->d_MyGroup &&
6505 (scan->d_MyGroup->sg_Flags & (SGF_RESOLVING | SGF_RESOLVED)))
6507 resolveDeclStorage(scan, flags,
/* second sweep: recurse one level deeper where sub-chains exist */
6512 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
6513 if (scan->d_SubBase) {
6514 resolveDynamicDeclStorage(scan, flags,
6523 * resolveExpOnlyStorage()
6525 * Resolve temporary storage for this exp structure, do not recurse
6526 * sub-expressions. Any type-temporary storage is tacked onto the end of
6527 * this expression's temporary area.
6529 * We do not need to assign storage for expressions which return lvalues,
6530 * because they will simply return a pointer into non-temporary storage.
/*
 * resolveExpOnlyStorage() - assign this expression node's temporary
 * storage offset (ex_TmpOffset) without recursing into
 * sub-expressions (see the header comment above).  Lvalue-producing
 * expressions get the sentinel -2 instead of real space; -1 means
 * "never assigned" and is asserted against at the end.
 */
6533 resolveExpOnlyStorage(Exp *exp, int flags,
6534 urunesize_t base, urunesize_t *limitp)
6538 dassert(flags & RESOLVE_FINALIZE);
6539 dassert((exp->ex_RState & RSF_SUB_STORAGE) == 0);
6540 exp->ex_RState |= RSF_STORAGE | RSF_SUB_STORAGE;
6543 * sync-up any adjustments to base made by the caller
6550 * State machine for Storage (not used for cleaning)
6552 * RESOLVE_FINALIZE - Must also resolve indirect dependencies that
6553 * do not directly affect storage for (exp).
6555 if (flags & RESOLVE_FINALIZE) {
6556 dassert_exp(exp, exp->ex_RState & RSF_SUB_ALIGN);
6557 if (exp->ex_RState & RSF_SUB_STORAGE)
6559 exp->ex_RState |= RSF_STORAGE | RSF_SUB_STORAGE;
6561 dassert_exp(exp, exp->ex_RState & RSF_ALIGN);
6562 if (exp->ex_RState & RSF_STORAGE)
6564 exp->ex_RState |= RSF_STORAGE;
6569 * Adjust if expression resolves to a type rather than a value.
6570 * e.g. when you do something like switch (typeof(int)) { ... }.
6571 * Types are handled as thin pointers.
6573 if (exp->ex_Flags & EXF_RET_TYPE) {
6574 exp->ex_TmpOffset = BASEALIGN(base, RAWPTR_ALIGN);
6575 SIZELIMIT(base, sizeof(void *), limitp);
6579 * If the exp has a decl
6585 if (d->d_Flags & DF_RESOLVED) {
6586 resolveDeclStorage(d, flags, base, limitp, base, limitp);
6591 * Assign temporary offset. This offset does not overlap temporary space
6592 * reserved for sub-expressions.
6594 * We must have an assigned type. Expression sequences like:
6595 * 'module.blah' are collapsed into 'blah' long before we get here, or
6596 * they should be. We should not encounter any TOK_TCMV_ID expression
6597 * tokens. Structural id's (the right hand side of X.Y) are resolved by
6598 * their parent expression node and no typing or temporary space is
6601 * Expressions that return lvalues do not need temporary space.
6603 type = exp->ex_Type;
6605 switch (exp->ex_Token) {
6610 dasserts_exp(exp, 0, "Unhandled expression token");
/* sentinel -3: no-space case (its guard is not visible in this view) */
6613 exp->ex_TmpOffset = -3;
6614 } else if (exp->ex_Flags2 & EX2F_LVALUE) {
6615 // (type->ty_SQFlags & SF_LVALUE)
6617 * Expressive elements which return lvalues do not get temporary
6618 * space. Note that this also prevents lvalues such as large arrays
6619 * (int ary[999999999]) from reserving unnecessary stack space.
6621 * NOTE: SF_LVALUE is now equivalent to SCOPE_LVALUE. It tells
6622 * us precisely whether the type is being stored as a
6623 * LValueStor or not.
6625 exp->ex_TmpOffset = -2;
6626 dassert_exp(exp, exp->ex_Token != TOK_COMPOUND);
6629 * Reserve temporary space for potential intermediate results.
6631 * Compound expressions may need extra space to default-init the
6632 * compound value, it is expected to be available to the generator
6633 * right after the nominal type in the TmpOffset. XXX also make
6634 * available to the interpreter?
6636 * Procedure calls also may need extra space to default-init the
6637 * return value. XXX also make available to the interpreter?
6639 base = BASEALIGN(base, type->ty_AlignMask);
6642 * It may be convenient to use a larger alignment for arrays, which
6643 * would allow (e.g.) %xmm registers to be used on 64-bit arrays for
6644 * moves. Limit to 16-byte alignment for now.
6646 * (See also resolveExpAlign())
6648 if (type->ty_Op == TY_ARYOF || type->ty_Op == TY_COMPOUND ||
6649 type->ty_Op == TY_ARGS) {
6650 if (type->ty_Bytes >= 16) {
6651 base = BASEALIGN(base, 15);
6652 } else if (type->ty_Bytes >= 8) {
6653 base = BASEALIGN(base, 7);
6654 } else if (type->ty_Bytes >= 4) {
6655 base = BASEALIGN(base, 3);
6660 * Temporary storage for this exp
6662 exp->ex_TmpOffset = base;
6663 SIZELIMIT(base, type->ty_Bytes, limitp);
6666 * A compound expression's type may need additional temporary
6667 * storage. NOTE: The type might not yet be changed to TY_COMPOUND,
6668 * but single-element compounds will use the same temporary space as
6671 * A procedure call may need additional temporary storage.
6673 * (base was adjusted above and is exp->ex_TmpOffset)
6675 if (exp->ex_Token == TOK_COMPOUND) {
6677 * NOTE: type might not yet be changed to compound, but
6678 * single-element compound will use the same temporary space.
6680 resolveTypeStorage(type, RESOLVE_FINALIZE,
6681 base + type->ty_Bytes, limitp);
6682 } else if (exp->ex_Token == TOK_CALL) {
6683 resolveTypeStorage(type, RESOLVE_FINALIZE,
6684 base + type->ty_TmpBytes, limitp);
/* -1 would mean the offset above was never assigned */
6687 dassert(exp->ex_TmpOffset != -1);
6691 * Calculate the overlapping temporary space for sub-expression trees.
6693 * (flags already asserted and RState adjusted by resolveExpOnlyStorage())
/*
 * resolveExpSubStorage() - resolve overlapping temporary storage for an
 * expression's sub-trees (lhs/rhs/compound elements).
 *
 * (exp)    expression whose children get temporary-space assignments
 * (flags)  RESOLVE_* flags (already asserted by resolveExpOnlyStorage(),
 *          per the header comment above)
 * (base)   starting temporary-storage offset
 * (limitp) in/out high-water mark for required temporary bytes
 */
6696 resolveExpSubStorage(Exp *exp, int flags,
6697 urunesize_t base, urunesize_t *limitp)
6702 * sync-up any adjustments to base made by the caller
/* resolve storage for the expression's own type first */
6708 resolveTypeStorage(exp->ex_Type, flags, base, limitp);
6711 * Make sure resolved declarations have resolved temporary storage for
6712 * assigned expressions. XXX pure test
6714 if (exp->ex_Token == TOK_ID || exp->ex_Token == TOK_CLASSID) {
6718 if (d && (d->d_Flags & DF_RESOLVED)) {
6719 resolveDeclStorage(d, flags, base, limitp, base, limitp);
6721 /* note: UNARY can be set for aliases */
6725 * Used only by TOK_INLINE_CALL, calculates the temporary storage
6726 * base for the inline procedure's body.
6731 * Calculate the overlapping temporary space for sub-trees.
6733 if (exp->ex_Flags & EXF_BINARY) {
6735 * Ensure lhs's NON-RECURSIVE temporary storage on-return does not
6736 * interfere with rhs's, or vice-versa.
6738 * In addition, neither the lhs or rhs sides interfere with their
6739 * parent node's temporary storage, which is particularly important
6740 * for setting up the return storage for TOK_CALLs (etc).
6742 * To do this offset the rhs storage by the non-recursive lhs
6747 resolveExpStorage(exp->ex_Lhs, flags, base, &blimit);
6748 if (exp->ex_Lhs->ex_TmpOffset >= 0) {
/* rhs temp space begins just past the lhs's own result storage */
6749 resolveExpStorage(exp->ex_Rhs, flags,
6750 exp->ex_Lhs->ex_TmpOffset +
6751 exp->ex_Lhs->ex_Type->ty_Bytes,
6754 resolveExpStorage(exp->ex_Rhs, flags, base, &blimit);
6756 SIZELIMIT(blimit, 0, limitp);
6757 } else if (exp->ex_Flags & EXF_UNARY) {
6758 resolveExpStorage(exp->ex_Lhs, flags, base, limitp);
6759 dassert_exp(exp, exp->ex_Lhs->ex_Next == NULL);
6760 } else if (exp->ex_Flags & EXF_COMPOUND) {
6762 * Each element will be copied into the compound storage in turn, so
6763 * we can union the temporary storage required for each element.
6767 for (scan = exp->ex_Lhs; scan; scan = scan->ex_Next) {
6768 dassert_exp(scan, scan->ex_Type != NULL);
6769 resolveExpStorage(scan, flags, base, limitp);
/*
 * Post-recursion special cases: dynamic method calls and inlined calls.
 */
6773 if (exp->ex_Token == TOK_CALL) {
6777 resolveDynamicProcedureStorage(exp, flags, base, limitp, base, limitp);
6778 } else if (exp->ex_Token == TOK_INLINE_CALL) {
6780 * For an inlined-procedure, the procedure body has been dup'd and
6781 * thus should be entirely unique. We have to incorporate its
6782 * temporary storage into our own.
6784 * The inlined procedure body's temporary space must begin after
6785 * the temporary space we reserve for the arguments and return
6786 * value (aka blimit).
6788 Stmt *st = exp->ex_AuxStmt;
6789 SemGroup *sg = st->st_MyGroup;
6791 /* dassert((exp->ex_Flags & EXF_DUPEXP) == 0); */
6792 dassert(sg->sg_Parent);
6793 dassert(exp->ex_Flags & EXF_BINARY);
6794 dassert((st->st_Flags & (STF_SEMTOP | STF_SEMANTIC)) == STF_SEMANTIC);
6795 dassert((flags & RESOLVE_FINALIZE) &&
6796 (st->st_RState & RSF_SUB_STORAGE) == 0);
/* inline body's temp space starts aligned just past blimit */
6798 sg->sg_TmpBytes = BASEALIGN(blimit, sg->sg_TmpAlignMask);
6799 ResolveStorage(st, flags);
6800 SIZELIMIT(sg->sg_TmpBytes, 0, limitp);
6802 resolveDynamicProcedureStorage(exp, flags, base, limitp, base, limitp);
6807 * Resolve all storage parameters for an expression tree. When dealing
6808 * with an expression tree, we do not overlap the parent's temporary space
6809 * with that of either the lhs or rhs, and we do not overlap the lhs's
6810 * result storage with the rhs. However, the remainder of the lhs's
6811 * temporary space can be overlapped with the rhs.
6813 * WARNING! Storage parameters for executable code cannot be re-resolved,
6814 * assert once-only. Resolve-time constant evaluation should be
6815 * interpreted just once.
/*
 * resolveExpStorage() - entry point for resolving an expression tree's
 * temporary storage; see the header comment above for overlap rules.
 *
 * Asserts once-only resolution (RSF_SUB_STORAGE must not yet be set) and
 * requires RESOLVE_FINALIZE. Resolves the node's own storage first, then
 * recurses via resolveExpSubStorage(), offsetting the sub-tree base past
 * this node's result storage when the node reserved temp space.
 */
6818 resolveExpStorage(Exp *exp, int flags, urunesize_t base, urunesize_t *limitp)
6820 dassert(flags & RESOLVE_FINALIZE);
6821 dassert((exp->ex_RState & RSF_SUB_STORAGE) == 0);
6824 * sync-up any adjustments to base made by the caller
6829 resolveExpOnlyStorage(exp, flags, base, limitp);
/* ex_TmpOffset >= 0 means this node reserved its own temp storage */
6830 if (exp->ex_TmpOffset >= 0) {
6831 resolveExpSubStorage(exp, flags,
6833 exp->ex_Type->ty_Bytes,
6836 resolveExpSubStorage(exp, flags, base, limitp);
/*
 * resolveExpAlign() - accumulate the temporary-storage alignment required
 * by an expression tree into *expalignp.
 *
 * NOTE(review): alignment values appear to be stored as masks
 * (alignment - 1, e.g. 15 for 16-byte alignment) — confirm against the
 * BASEALIGN() definition.
 */
6841 resolveExpAlign(Exp *exp, urunesize_t *expalignp, int flags)
6848 * RESOLVE_CLEAN - If set, RESOLVE_FINALIZE is also always set
6850 * RESOLVE_FINALIZE - Must also resolve indirect dependencies that
6851 * do not directly affect alignment of (type).
6853 * NOTE: SGF_RESOLVED might not be set, indicating that we were able to
6854 * pick-out individual declarations in (global) SGs without having
6855 * to resolve the whole group. This allows unused declarations
6856 * to be omitted by the code generator.
6859 if (flags & RESOLVE_CLEAN) {
6860 if ((exp->ex_RState & (RSF_ALIGN | RSF_SUB_ALIGN)) == 0)
6862 exp->ex_RState &= ~(RSF_ALIGN | RSF_SUB_ALIGN |
6863 RSF_STORAGE | RSF_SUB_STORAGE);
6864 } else if (flags & RESOLVE_FINALIZE) {
6865 if (exp->ex_RState & RSF_SUB_ALIGN)
6867 exp->ex_RState |= RSF_ALIGN | RSF_SUB_ALIGN;
6869 if (exp->ex_RState & RSF_ALIGN)
6871 exp->ex_RState |= RSF_ALIGN;
/* expressions returning a type value need raw-pointer alignment */
6874 if (exp->ex_Flags & EXF_RET_TYPE) {
6875 if (*expalignp < RAWPTR_ALIGN)
6876 *expalignp = RAWPTR_ALIGN;
6877 // type resolution might wind up being more complex so
6879 //if ((flags & RESOLVE_FINALIZE) == 0)
6884 * Any associated type
6886 type = exp->ex_Type;
/* lvalue results are stored as LValueStor, not by raw type alignment */
6888 if (type->ty_SQFlags & SF_LVALUE) {
6889 if (*expalignp < LVALUESTOR_ALIGN)
6890 *expalignp = LVALUESTOR_ALIGN;
6892 if (*expalignp < type->ty_AlignMask)
6893 *expalignp = type->ty_AlignMask;
6895 resolveTypeAlign(type, expalignp, flags | RESOLVE_FINALIZE);
6898 * It may be convenient to use a larger alignment for arrays, which
6899 * would allow (e.g.) %xmm registers to be used on 64-bit arrays for
6900 * moves. Limit to 16-byte alignment for now.
6902 * (See also resolveExpOnlyStorage())
6904 if (type->ty_Op == TY_ARYOF || type->ty_Op == TY_COMPOUND ||
6905 type->ty_Op == TY_ARGS)
6908 if (type->ty_Bytes >= 64) {
6909 if (*expalignp < 63)
6911 } else if (type->ty_Bytes >= 32) {
6912 if (*expalignp < 31)
6916 if (type->ty_Bytes >= 16) {
6917 if (*expalignp < 15)
6919 } else if (type->ty_Bytes >= 8) {
6922 } else if (type->ty_Bytes >= 4) {
6930 * Any associated declaration
6936 if (d->d_Flags & DF_RESOLVED) {
6937 resolveDeclAlign(d, expalignp, flags);
6942 * Misc special cases
6944 switch(exp->ex_Token) {
6947 * Alignment for dynamic procedures
6949 resolveDynamicProcedureAlign(exp, expalignp, flags);
6951 case TOK_INLINE_CALL:
6953 * Recurse through for an inline call, then roll-up the alignment
6954 * requirement(s) for the target procedure. We handle the 'arguments'
6955 * and 'return value' alignment in EXF_BINARY below.
6960 ResolveAlignment(exp->ex_AuxStmt, flags);
6961 asg = exp->ex_AuxStmt->st_MyGroup;
6962 if (*expalignp < asg->sg_TmpAlignMask)
6963 *expalignp = asg->sg_TmpAlignMask;
6964 resolveDynamicProcedureAlign(exp, expalignp, flags);
6972 * Nominal lhs, rhs, and compound recursion
6974 if (exp->ex_Flags & EXF_BINARY) {
6975 resolveExpAlign(exp->ex_Lhs, expalignp, flags);
6976 resolveExpAlign(exp->ex_Rhs, expalignp, flags);
6977 } else if (exp->ex_Flags & EXF_UNARY) {
6978 resolveExpAlign(exp->ex_Lhs, expalignp, flags);
6979 } else if (exp->ex_Flags & EXF_COMPOUND) {
6982 for (scan = exp->ex_Lhs; scan; scan = scan->ex_Next) {
6983 resolveExpAlign(scan, expalignp, flags);
6989 * resolveTypeAlign()
6991 * Figure out the temporary space required to initialize a type's defaults.
6992 * Note that the space will be figured independently for each SemGroup.
6996 resolveTypeAlign(Type *type, urunesize_t *expalignp, int flags)
6998 SemGroup *sg = NULL;
6999 Type *subtype1 = NULL;
7000 Type *subtype2 = NULL;
/* type must be fully resolved (pass2) before alignment can be computed */
7003 dassert(type->ty_Flags & TF_RESOLVED);
7008 * RESOLVE_CLEAN - If set, RESOLVE_FINALIZE is also always set
7010 * RESOLVE_FINALIZE - Must also resolve indirect dependencies that
7011 * do not directly affect alignment of (type).
7013 if (flags & RESOLVE_CLEAN) {
7014 if ((type->ty_RState & (RSF_ALIGN | RSF_SUB_ALIGN)) == 0)
7016 type->ty_RState &= ~(RSF_ALIGN | RSF_SUB_ALIGN |
7017 RSF_STORAGE | RSF_SUB_STORAGE);
7018 } else if (flags & RESOLVE_FINALIZE) {
/* already done at this level; just roll-up the cached result */
7019 if (type->ty_RState & RSF_SUB_ALIGN) {
7020 if (*expalignp < type->ty_TmpAlignMask)
7021 *expalignp = type->ty_TmpAlignMask;
7024 type->ty_RState |= RSF_ALIGN | RSF_SUB_ALIGN;
7026 if (type->ty_RState & RSF_ALIGN) {
7027 if (*expalignp < type->ty_TmpAlignMask)
7028 *expalignp = type->ty_TmpAlignMask;
7031 type->ty_RState |= RSF_ALIGN;
/*
 * Decompose the type: either a SemGroup (class/compound/import/args/var)
 * or one/two sub-types (array element, procedure args+return).
 */
7034 switch (type->ty_Op) {
7036 sg = type->ty_ClassType.et_SemGroup;
7039 subtype1 = type->ty_AryType.et_Type;
7042 sg = type->ty_CompType.et_SemGroup;
7045 subtype1 = type->ty_ProcType.et_ArgsType;
7046 subtype2 = type->ty_ProcType.et_RetType;
7049 sg = type->ty_ImportType.et_SemGroup;
7052 sg = type->ty_ArgsType.et_SemGroup;
7055 sg = type->ty_VarType.et_SemGroup;
7058 if (flags & RESOLVE_FINALIZE) {
/* target resolved for its own sake; does not affect pointer's alignment */
7059 pass2 = type->ty_RawPtrType.et_Type;
7060 resolveTypeAlign(pass2, &pass2->ty_TmpAlignMask, flags);
7062 /* has nothing to do with initializing the pointer */
7063 /* subtype1 = type->ty_RawPtrType.et_Type; */
7066 if (flags & RESOLVE_FINALIZE) {
7067 pass2 = type->ty_RefType.et_Type;
7068 resolveTypeAlign(pass2, &pass2->ty_TmpAlignMask, flags);
7070 /* has nothing to do with initializing the pointer */
7071 /* subtype1 = type->ty_RefType.et_Type; */
7076 * nothing to be done here.
7079 case TY_UNRESOLVED: /* should be no unresolved types now */
7081 dassert_type(type, 0);
/* roll sub-type alignments (and default-init expressions) into ours */
7085 resolveTypeAlign(subtype1, &subtype1->ty_TmpAlignMask, flags);
7086 if (subtype1->ty_AssExp && (flags & RESOLVE_FINALIZE)) {
7087 resolveExpAlign(subtype1->ty_AssExp,
7088 &subtype1->ty_TmpAlignMask, flags);
7090 if (type->ty_TmpAlignMask < subtype1->ty_TmpAlignMask)
7091 type->ty_TmpAlignMask = subtype1->ty_TmpAlignMask;
7094 resolveTypeAlign(subtype2, &subtype2->ty_TmpAlignMask, flags);
7095 if (subtype2->ty_AssExp && (flags & RESOLVE_FINALIZE)) {
7096 resolveExpAlign(subtype2->ty_AssExp,
7097 &subtype2->ty_TmpAlignMask, flags);
7099 if (type->ty_TmpAlignMask < subtype2->ty_TmpAlignMask)
7100 type->ty_TmpAlignMask = subtype2->ty_TmpAlignMask;
7102 if (type->ty_AssExp && (flags & RESOLVE_FINALIZE)) {
7103 resolveExpAlign(type->ty_AssExp,
7104 &type->ty_TmpAlignMask, flags);
7107 dassert(sg->sg_Flags & SGF_RESOLVED);
7108 /* ResolveSemGroup(sg, 0); */
7109 resolveSemGroupAlign(sg, flags);
7110 if (type->ty_TmpAlignMask < sg->sg_TmpAlignMask)
7111 type->ty_TmpAlignMask = sg->sg_TmpAlignMask;
/* report the final roll-up to the caller */
7113 if (*expalignp < type->ty_TmpAlignMask)
7114 *expalignp = type->ty_TmpAlignMask;
7118 * Resolve the storage offsets (after alignment pass).
7120 * RESOLVE_ISGLOB - Global storage is separately calculated from
7121 * type-instantiated storage.
7123 * RESOLVE_FINALIZE - Indicates that we should resolve all underlying
7124 * types, typically because the type was referenced
7125 * via an expression.
/*
 * resolveTypeStorage() - compute temporary-storage bytes needed to
 * default-initialize (type); see the header comment above for flag
 * semantics (RESOLVE_ISGLOB, RESOLVE_FINALIZE).
 */
7129 resolveTypeStorage(Type *type, int flags,
7130 urunesize_t base, urunesize_t *limitp)
7132 SemGroup *sg = NULL;
7133 Type *subtype1 = NULL;
7134 Type *subtype2 = NULL;
7138 * NOTE: This function will unconditionally max *limitp against base
7139 * so no need to do it right here.
7142 * State machine for Storage (not used for cleaning)
7144 * RESOLVE_FINALIZE - Must also resolve indirect dependencies that
7145 * do not directly affect storage for (type).
7147 if (flags & RESOLVE_FINALIZE) {
/* alignment pass must have preceded the storage pass */
7148 dassert_type(type, type->ty_RState & RSF_SUB_ALIGN);
/* already resolved at this level; just apply the cached size */
7149 if (type->ty_RState & RSF_SUB_STORAGE) {
7150 base = BASEALIGN(base, type->ty_TmpAlignMask);
7151 SIZELIMIT(base, type->ty_TmpBytes, limitp);
7154 type->ty_RState |= RSF_STORAGE | RSF_SUB_STORAGE;
7156 dassert_type(type, type->ty_RState & RSF_ALIGN);
7157 if (type->ty_RState & RSF_STORAGE) {
7158 base = BASEALIGN(base, type->ty_TmpAlignMask);
7159 SIZELIMIT(base, type->ty_TmpBytes, limitp);
7162 type->ty_RState |= RSF_STORAGE;
/* same type decomposition as resolveTypeAlign() */
7165 switch (type->ty_Op) {
7167 sg = type->ty_ClassType.et_SemGroup;
7170 subtype1 = type->ty_AryType.et_Type;
7173 sg = type->ty_CompType.et_SemGroup;
7176 subtype1 = type->ty_ProcType.et_ArgsType;
7177 subtype2 = type->ty_ProcType.et_RetType;
7180 sg = type->ty_ImportType.et_SemGroup;
7183 sg = type->ty_ArgsType.et_SemGroup;
7186 sg = type->ty_VarType.et_SemGroup;
7189 if (flags & RESOLVE_FINALIZE) {
/* target resolved into its own TmpBytes; unrelated to the pointer */
7190 pass2 = type->ty_RawPtrType.et_Type;
7191 resolveTypeStorage(pass2, flags, 0, &pass2->ty_TmpBytes);
7195 if (flags & RESOLVE_FINALIZE) {
7196 pass2 = type->ty_RefType.et_Type;
7197 resolveTypeStorage(pass2, flags, 0, &pass2->ty_TmpBytes);
7203 * nothing to be done here.
7206 case TY_UNRESOLVED: /* should be no unresolved types now */
7208 dassert_type(type, 0);
/* roll sub-type storage (and default-init expressions) into ours */
7212 resolveTypeStorage(subtype1, flags, 0, &subtype1->ty_TmpBytes);
7213 if (subtype1->ty_AssExp && (flags & RESOLVE_FINALIZE)) {
7214 /* XXX base is 0? */
7215 resolveExpStorage(subtype1->ty_AssExp, flags,
7216 0, &subtype1->ty_TmpBytes);
7218 base = BASEALIGN(base, subtype1->ty_TmpAlignMask);
7219 SIZELIMIT(base, subtype1->ty_TmpBytes, limitp);
7221 if (type->ty_TmpAlignMask < subtype1->ty_TmpAlignMask)
7222 type->ty_TmpAlignMask = subtype1->ty_TmpAlignMask;
7226 resolveTypeStorage(subtype2, flags, 0, &subtype2->ty_TmpBytes);
7227 if (subtype2->ty_AssExp && (flags & RESOLVE_FINALIZE)) {
7228 /* XXX base is 0? */
7229 resolveExpStorage(subtype2->ty_AssExp, flags,
7230 0, &subtype2->ty_TmpBytes);
7232 base = BASEALIGN(base, subtype2->ty_TmpAlignMask);
7233 SIZELIMIT(base, subtype2->ty_TmpBytes, limitp);
7235 if (type->ty_TmpAlignMask < subtype2->ty_TmpAlignMask)
7236 type->ty_TmpAlignMask = subtype2->ty_TmpAlignMask;
7239 if (type->ty_AssExp && (flags & RESOLVE_FINALIZE)) {
7240 /* XXX base is 0? */
7241 resolveExpStorage(type->ty_AssExp, flags, 0, &type->ty_TmpBytes);
7245 dassert(sg->sg_Flags & SGF_RESOLVED);
/* SemGroup temp space is calculated per-group, not aggregated here */
7246 resolveSemGroupStorage(sg, flags, 0, NULL, 0, NULL);
7247 if (flags & RESOLVE_ISGLOB) {
7249 base = BASEALIGN(base, sg->sg_GlobalAlignMask);
7250 base = BASEALIGN(base, sg->sg_TmpAlignMask);
7251 SIZELIMIT(base, sg->sg_GlobalTmpBytes, limitp);
7253 base = BASEALIGN(base, sg->sg_TmpAlignMask);
7254 SIZELIMIT(base, sg->sg_TmpBytes, limitp);
7258 * Re-resolve the type flags. XXX mostly fixed once I handled
7259 * CBase/DBase/GBase in resolveSemGroup1().
7261 if (sg->sg_Flags & SGF_HASASS)
7262 type->ty_Flags |= TF_HASASS;
7263 if (sg->sg_Flags & SGF_HASPTR)
7264 type->ty_Flags |= TF_HASPTR;
7266 type->ty_Flags |= TF_HASLVREF;
7267 if (sg->sg_Flags & SGF_VARARGS)
7268 type->ty_Flags |= TF_HASLVREF; /* XXX TF_VARARGS */
7270 type->ty_Flags |= TF_HASCONSTRUCT;
7272 type->ty_Flags |= TF_HASDESTRUCT;
7276 * Catch-all (catches caller adjustment to base)
7278 SIZELIMIT(base, 0, limitp);
7282 * This is used to resolve temporary storage requirements for SemGroup's
7283 * related to classes and compound types. Temporary storage requirements are
7284 * calculated on a SemGroup-by-SemGroup basis and not aggregated into any
7287 * In the final pass we also reverse the constructor and destructor lists
7288 * (sg_CBase and sg_DBase), and the pointer/lvalue list (SRBase). These
7289 * lists were originally constructed by prepending and are thus in the wrong
/*
 * resolveSemGroupAlign() - accumulate alignment requirements for the
 * declarations of a SemGroup into sg_TmpAlignMask, sg_AlignMask, and
 * (for SCOPE_GLOBAL declarations) sg_GlobalAlignMask.
 */
7294 resolveSemGroupAlign(SemGroup *sg, int flags)
7301 * RESOLVE_CLEAN - If set, RESOLVE_FINALIZE is also always set
7303 * RESOLVE_FINALIZE - Must also resolve indirect dependencies that
7304 * do not directly affect alignment of (type).
7306 * NOTE: SGF_RESOLVED might not be set, indicating that we were able to
7307 * pick-out individual declarations in (global) SGs without having
7308 * to resolve the whole group. This allows unused declarations
7309 * to be omitted by the code generator.
7312 if (flags & RESOLVE_CLEAN) {
7313 if ((sg->sg_RState & (RSF_ALIGN | RSF_SUB_ALIGN)) == 0)
7315 sg->sg_RState &= ~(RSF_ALIGN | RSF_SUB_ALIGN |
7316 RSF_STORAGE | RSF_SUB_STORAGE);
7317 } else if (flags & RESOLVE_FINALIZE) {
7318 if (sg->sg_RState & RSF_SUB_ALIGN)
7320 sg->sg_RState |= RSF_ALIGN | RSF_SUB_ALIGN;
7322 if (sg->sg_RState & RSF_ALIGN)
7324 sg->sg_RState |= RSF_ALIGN;
7327 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
/*
 * Constructors/destructors force full resolution of a not-yet-resolved
 * module or class group, since they may run even if nothing else is used.
 */
7329 if ((d->d_ScopeFlags & (SCOPE_CONSTRUCTOR |
7330 SCOPE_DESTRUCTOR))) {
7331 if ((sg->sg_Flags & SGF_RESOLVED) == 0 &&
7332 (sg->sg_Op == SG_MODULE || sg->sg_Op == SG_CLASS)) {
7333 ResolveSemGroup(sg, 0);
7337 if ((d->d_Flags & DF_RESOLVED) == 0)
7339 resolveDeclAlign(d, &sg->sg_TmpAlignMask, flags);
7340 if (d->d_ScopeFlags & SCOPE_GLOBAL) {
7341 if (sg->sg_GlobalAlignMask < d->d_AlignMask)
7342 sg->sg_GlobalAlignMask = d->d_AlignMask;
7344 if (sg->sg_AlignMask < d->d_AlignMask)
7345 sg->sg_AlignMask = d->d_AlignMask;
/*
 * resolveSemGroupStorage() - resolve temporary storage for all resolved
 * declarations in (sg), accumulating into *limitp (normal) and *glimitp
 * (global). Passing limitp == NULL selects internal accumulators and
 * stores the results into sg_TmpBytes / sg_GlobalTmpBytes.
 *
 * On the final pass also re-reverses the prepended constructor (CBase),
 * destructor (DBase), global (GBase) and pointer/lvalue (SRBase) lists
 * into declaration order, per the header comment above.
 */
7352 resolveSemGroupStorage(SemGroup *sg, int flags,
7353 urunesize_t base, urunesize_t *limitp,
7354 urunesize_t gbase, urunesize_t *glimitp)
7358 urunesize_t dummy_limit = 0;
7359 urunesize_t dummy_glimit = 0;
7362 * sync-up any adjustments to base made by the caller
7364 if (limitp && *limitp < base)
7366 if (glimitp && *glimitp < gbase)
7370 * State machine for Storage (not used for cleaning)
7372 * RESOLVE_FINALIZE - Must also resolve indirect dependencies that
7373 * do not directly affect storage for (sg).
7375 if (flags & RESOLVE_FINALIZE) {
7376 dassert_sg(sg, sg->sg_RState & RSF_SUB_ALIGN);
7377 if (sg->sg_RState & RSF_SUB_STORAGE)
7379 sg->sg_RState |= RSF_STORAGE | RSF_SUB_STORAGE;
7381 dassert_sg(sg, sg->sg_RState & RSF_ALIGN);
7382 if (sg->sg_RState & RSF_STORAGE)
7384 sg->sg_RState |= RSF_STORAGE;
/* NULL limitp selects the internal accumulators (see sg_TmpBytes below) */
7387 if (limitp == NULL) {
7388 limitp = &dummy_limit;
7389 glimitp = &dummy_glimit;
7395 RUNE_FOREACH(d, &sg->sg_DeclList, d_Node) {
7396 if (d->d_Flags & DF_RESOLVED) {
7397 resolveDeclStorage(d, flags, base, limitp, gbase, glimitp);
/*
 * Final pass: pop each list and re-prepend onto a fresh head, which
 * reverses it back into original declaration order.
 */
7404 if (flags & RESOLVE_FINALIZE) {
7405 if ((d2 = sg->sg_CBase) != NULL) {
7406 sg->sg_CBase = NULL;
7407 while ((d = d2) != NULL) {
7409 d->d_CNext = sg->sg_CBase;
7413 if ((d2 = sg->sg_DBase) != NULL) {
7414 sg->sg_DBase = NULL;
7415 while ((d = d2) != NULL) {
7417 d->d_DNext = sg->sg_DBase;
7421 if ((d2 = sg->sg_GBase) != NULL) {
7422 sg->sg_GBase = NULL;
7423 while ((d = d2) != NULL) {
7425 d->d_GNext = sg->sg_GBase;
7429 if ((d2 = sg->sg_SRBase) != NULL) {
7430 sg->sg_SRBase = NULL;
7431 while ((d = d2) != NULL) {
7433 d->d_SRNext = sg->sg_SRBase;
/* cache the totals when using the internal accumulators */
7439 sg->sg_TmpBytes = *limitp;
7440 sg->sg_GlobalTmpBytes = *glimitp;
7444 * If we are resolving to a dynamic method call we need to flag all matching
7445 * current subclass decls for (d) not yet resolved to ensure they get
7446 * resolved if their related class is used at all, since the dynamic method
7447 * call might be trying to call any of them.
7449 static void resolveDynamicDecl(Declaration *d);
/*
 * resolveDynamicProcedure() - if (exp) is a dynamic method call through a
 * reference (X.Y where X is a TY_REFTO a class), force resolution of all
 * matching subclass declarations via resolveDynamicDecl(); see the header
 * comment above. Non-dynamic calls return early.
 */
7453 resolveDynamicProcedure(SemGroup * isg __unused, SemGroup * sg __unused,
7454 Exp * exp, int flags __unused)
7461 type = lhs->ex_Lhs->ex_Type;
/* only a structural-indirect (TOK_STRIND) through a reference is dynamic */
7464 if (lhs->ex_Token != TOK_STRIND || type->ty_Op != TY_REFTO)
7466 type = type->ty_RefType.et_Type;
7467 dassert_exp(exp, type->ty_Op == TY_CLASS);
7469 resolveDynamicDecl(d);
/*
 * resolveDynamicProcedureAlign() - alignment companion to
 * resolveDynamicProcedure(): roll-up alignment for all subclass decls a
 * dynamic method call could dispatch to. Non-dynamic calls return early.
 */
7474 resolveDynamicProcedureAlign(Exp *exp, urunesize_t *expalignp, int flags)
7481 type = lhs->ex_Lhs->ex_Type;
7484 if (lhs->ex_Token != TOK_STRIND || type->ty_Op != TY_REFTO)
7486 type = type->ty_RefType.et_Type;
7487 dassert_exp(exp, type->ty_Op == TY_CLASS);
7489 resolveDynamicDeclAlign(d, expalignp, flags);
/*
 * resolveDynamicProcedureStorage() - storage companion to
 * resolveDynamicProcedure(): resolve temporary storage for all subclass
 * decls a dynamic method call could dispatch to. Non-dynamic calls
 * return early (after syncing the limits against the bases).
 */
7494 resolveDynamicProcedureStorage(Exp *exp, int flags,
7495 urunesize_t base, urunesize_t *limitp,
7496 urunesize_t gbase, urunesize_t *glimitp)
7503 * sync-up any adjustments to base made by the caller
7505 if (limitp && *limitp < base)
7507 if (glimitp && *glimitp < gbase)
7511 type = lhs->ex_Lhs->ex_Type;
7514 if (lhs->ex_Token != TOK_STRIND || type->ty_Op != TY_REFTO)
7516 type = type->ty_RefType.et_Type;
7517 dassert_exp(exp, type->ty_Op == TY_CLASS);
7519 resolveDynamicDeclStorage(d, flags, base, limitp, gbase, glimitp);
/*
 * resolveDynamicDecl() - mark every subclass override of (d) as
 * dynamically referenced and resolve it if its class group is in play,
 * then recurse down the subclass chains.
 */
7524 resolveDynamicDecl(Declaration *d)
7528 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
7529 scan->d_Flags |= DF_DYNAMICREF;
/* only resolve decls whose group is being (or has been) resolved */
7530 if (scan->d_MyGroup &&
7531 (scan->d_MyGroup->sg_Flags & (SGF_RESOLVING | SGF_RESOLVED)))
7533 ResolveDecl(scan, 0);
/* second loop: recurse into deeper subclass overrides */
7536 for (scan = d->d_SubBase; scan; scan = scan->d_SubNext) {
7537 if (scan->d_SubBase)
7538 resolveDynamicDecl(scan);
7543 * Handle everything required to inline a procedure. Small procedures are
7544 * automatically inlined unless 'noinline' is specified. 'inline' must be
7545 * specified to inline large procedures. We can only inline when we know the
7546 * exact procedure in question, so ref-based method calls tend to prevent
/*
 * Doubly-linked stack of in-progress inlines, used to detect/report the
 * current inline nesting (see resolveProcedureInline()). XInlineTop is
 * the fixed list head; XInlineBot tracks the innermost frame.
 */
7549 typedef struct xinline {
7550 struct xinline *prev;
7551 struct xinline *next;
7555 xinline_t XInlineTop;
7556 xinline_t *XInlineBot = &XInlineTop;
/*
 * resolveProcedureInline() - attempt to convert a TOK_CALL (exp) into a
 * TOK_INLINE_CALL by duplicating the target procedure's body; see the
 * header comment above for the eligibility rules.
 */
7560 resolveProcedureInline(SemGroup *isg __unused, SemGroup *sg __unused,
7561 Exp *exp, int flags)
7572 * Do not inline if internal, clang call, marked as noinline, or
7573 * threaded. Do not inline a function which will probably return a
7574 * constant (and be optimized into one directly, inlining will slow
7575 * things down in that situation).
7577 if (d->d_ScopeFlags & (SCOPE_INTERNAL | SCOPE_CLANG | SCOPE_NOINLINE))
7579 if (d->d_ScopeFlags & (SCOPE_THREAD))
7581 if (exp->ex_Flags & EXF_PROBCONST)
7585 * XXX optimize this if the reference type is known explicitly, otherwise
7586 * we can't inline since it requires a dynamic call.
7588 if (lhs->ex_Token == TOK_STRIND && lhs->ex_Lhs->ex_Type->ty_Op == TY_REFTO)
7592 * For now do not try to combine global data because each inline will get
7593 * its own instantiation, which is not what the programmer expects.
7595 st = d->d_ProcDecl.ed_ProcBody;
7598 if (st->st_MyGroup->sg_GlobalBytes || st->st_MyGroup->sg_GlobalTmpBytes)
7602 * XXX we should be able to allow var-args inlines, why doesn't this
7605 if (d->d_ProcDecl.ed_Type->ty_ProcType.et_ArgsType->
7606 ty_CompType.et_SemGroup->sg_Flags & SGF_VARARGS)
7612 * Do not inline the same procedure recursively, or if we can optimize
7613 * the procedure call into a constant by interpreting it once.
7615 if (d->d_Flags & DF_INLINING)
7617 if (exp->ex_Flags & EXF_CONST)
7621 * Do not inline if we do not know the precise procedure at resolve-time.
7623 if (d->d_Op != DOP_PROC || lhs->ex_Type->ty_Op == TY_REFTO)
/* push a frame onto the inline-nesting stack (popped on the way out) */
7626 xin = zalloc(sizeof(*xin));
7627 xin->prev = XInlineBot;
7629 XInlineBot->next = xin;
7633 * We inline the procedure by duplicating the procedure body and changing
7634 * the procedure call ex. Disallow recursive inlining.
7636 * Set PARSE_TYPE on exLhs to retain exLhs->ex_Type across any further
7637 * duplication for the TOK_INLINE_CALL switch.
7639 d->d_Flags |= DF_INLINING;
7641 dassert((exp->ex_Flags & EXF_DUPEXP) == 0);
7642 exp->ex_Lhs->ex_Flags |= EXF_PARSE_TYPE;
7643 st = d->d_ProcDecl.ed_ProcBody;
/* only inline bodies under the complexity threshold */
7644 if (st->st_MyGroup->sg_Complexity < RuneInlineComplexity) {
7648 char buf[RUNE_IDTOSTR_LEN];
/* debug trace of the current inline nesting chain */
7652 printf("InlineTest: %5d", st->st_MyGroup->sg_Complexity);
7653 for (xscan = XInlineTop.next; xscan; xscan = xscan->next) {
7654 printf(".%s", runeid_text(xscan->d->d_Id, buf));
7658 altsg = st->st_MyGroup->sg_Parent;
7659 dassert(st->st_Flags & STF_SEMANTIC);
7662 * Each inlining inserts a fresh copy of the procedure in question.
7664 st = DupStmt(st->st_MyGroup, NULL, d->d_ProcDecl.ed_OrigBody);
7665 st->st_ProcStmt.es_Decl = d;
7666 st->st_ProcStmt.es_Scope = d->d_Scope;
7667 st->st_Flags |= STF_INLINED_PROC;
7668 exp->ex_Token = TOK_INLINE_CALL;
7669 exp->ex_AuxStmt = st;
7670 dassert(st->st_RState == 0);
7673 * XXX sg_AltContext is actually what we want to have priority for
7674 * searches, not sg_Parent!
7676 ResolveStmt(d->d_ImportSemGroup, st, flags);
7677 st->st_MyGroup->sg_AltContext = altsg;
7678 st->st_MyGroup->sg_Flags |= SGF_ALTPRIORITY;
7681 * Link the inlined procedure's semantic context with our own so
7682 * stack storage is properly calculated. We must clear STF_SEMTOP
7683 * here or the alignment recursion will restart at 0.
7685 dassert(st->st_Flags & STF_SEMTOP);
7686 dassert(st->st_Flags & STF_SEMANTIC);
7687 st->st_Flags &= ~STF_SEMTOP;
7688 st->st_MyGroup->sg_Parent = sg;
7689 /* ResolveExp(isg, sg, exp, exp->ex_Type, flags); */
/* pop the inline-nesting frame */
7692 d->d_Flags &= ~DF_INLINING;
7693 XInlineBot->next = NULL;
7694 XInlineBot = xin->prev;
7695 zfree(xin, sizeof(*xin));
/*
 * SpecialSemGroupGet() - map a reserved identifier (__va_count etc.) to
 * its SPECIAL_* code, used for var-args and type introspection support.
 */
7699 SpecialSemGroupGet(runeid_t id)
7707 case RUNEID_VA_COUNT:
7710 case RUNEID_VA_TYPE:
7713 case RUNEID_VA_DATA:
7716 case RUNEID_VA_VARCOUNT:
7717 s = SPECIAL_VAR_COUNT;
7719 case RUNEID_VA_VARTYPE:
7720 s = SPECIAL_VAR_TYPE;
7722 case RUNEID_VA_VARDATA:
7723 s = SPECIAL_VAR_DATA;
7725 case RUNEID_VA_TYPEID:
7728 case RUNEID_VA_TYPESTR:
7729 s = SPECIAL_TYPESTR;
7739 * Fixup the 'this' argument to the correct subclass and add d_Search to
7740 * point to the original superclass. This allows replicated method
7741 * procedures to operate on all fields of a subclass via 'this', even
7742 * if they moved around or changed type.
7744 * parse2.c added the 'this' argument so we can assert that it exists
7746 * NOTE: This occurs in pass1 (ResolveClasses()) and cannot resolve
7747 * types, declarations, or anything else at this time.
/*
 * ResolveMethodProcedureThisArg() - retarget the implicit 'this' argument
 * of method procedure (pd) to subclass (subsg); see the header comment
 * above. Runs in pass1, so no full type resolution may occur here.
 */
7750 ResolveMethodProcedureThisArg(SemGroup *subsg, Declaration *pd)
7756 type = pd->d_ProcDecl.ed_Type;
7757 sg = type->ty_ProcType.et_ArgsType->ty_ArgsType.et_SemGroup;
/* parse2.c guarantees 'this' is the first argument declaration */
7758 d = RUNE_FIRST(&sg->sg_DeclList);
7759 dassert_decl(d, d->d_Id == RUNEID_THIS &&
7760 (d->d_Op == DOP_ARGS_STORAGE || d->d_Op == DOP_TYPEDEF));
7763 * 'this' inherits d_Search from the method procedure declaration,
7764 * which is set when superclass declarations are merged into a subclass
7765 * in ResolveClasses()
7767 d->d_Search = pd->d_Search;
7770 * Type of 'this' argument. Must be:
7774 * lvalue class @this
7775 * lvalue class *this
7777 * Automatic fixup of the 'this' argument. Change from 'class' to
7778 * 'subclass', and retain the SF_LVALUE qualifier.
7780 * We do this unconditionally, ignoring AUTOTHIS
7782 * d_Search will have already been set to the original class.
7784 if (/*(d->d_Flags & DF_AUTOTHIS) && */ d->d_Op == DOP_ARGS_STORAGE) {
/* capture the original qualifiers so they can be re-applied below */
7790 stype = d->d_StorDecl.ed_Type;
7791 sqflags = stype->ty_SQFlags;
7792 sexp = stype->ty_OrigAssExp;
7793 vis = stype->ty_Visibility;
7796 if (d->d_Flags & DF_AUTOTHIS) {
7798 * The parser put this here so just adjust what we know is
/* struct classes take 'this' by raw pointer, others by reference */
7801 if (subsg->sg_ClassType->ty_SQFlags & SF_STRUCT) {
7802 stype = TypeToRawPtrType(subsg->sg_ClassType);
7804 stype = TypeToRefType(subsg->sg_ClassType);
7808 * Replace the programmer-supplied this class with the proper
7809 * subclass. Retain the LVALUE qualifier and type default exp,
7812 if (stype->ty_Op == TY_PTRTO)
7813 stype = TypeToRawPtrType(subsg->sg_ClassType);
7814 else if (stype->ty_Op == TY_REFTO)
7815 stype = TypeToRefType(subsg->sg_ClassType);
/* re-apply LVALUE qualifier and default expression if needed */
7818 if (stype->ty_SQFlags != (sqflags & SF_LVALUE) || sexp)
7819 stype = TypeToQualType(stype, NULL, sexp,
7820 sqflags & SF_LVALUE,
7823 d->d_StorDecl.ed_Type = stype;
7825 if (/*(d->d_Flags & DF_AUTOTHIS) && */ d->d_Op == DOP_TYPEDEF) {
7826 d->d_TypedefDecl.ed_Type = subsg->sg_ClassType;
7831 checkUnrestrictedType(Declaration *d, Type *type)
7834 switch(type->ty_Op) {
7836 type = type->ty_AryType.et_Type;
7839 if ((type->ty_SQFlags & SCOPE_STRUCT) == 0)
7840 dfatal_decl(d, TOK_ERR_CLASS_STRUCT_EMBED, NULL);