diff --git a/src/ir/module-utils.cpp b/src/ir/module-utils.cpp
index a16592d15a7..1efbe490a07 100644
--- a/src/ir/module-utils.cpp
+++ b/src/ir/module-utils.cpp
@@ -399,15 +399,10 @@ struct CodeScanner
       }
     } else if (auto* get = curr->dynCast<StructGet>()) {
       info.note(get->ref->type);
-      // TODO: Just include curr->type for AllTypes and UsedIRTypes to avoid
-      // this special case and to avoid emitting unnecessary types in binaries.
-      info.include(get->type);
     } else if (auto* set = curr->dynCast<StructSet>()) {
       info.note(set->ref->type);
     } else if (auto* get = curr->dynCast<ArrayGet>()) {
       info.note(get->ref->type);
-      // See above.
-      info.include(get->type);
     } else if (auto* set = curr->dynCast<ArraySet>()) {
       info.note(set->ref->type);
     } else if (auto* contBind = curr->dynCast<ContBind>()) {
@@ -468,20 +463,6 @@ InsertOrderedMap<HeapType, HeapTypeInfo> collectHeapTypeInfo(
     }
   }
 
-  // TODO: Remove this once we remove the hack for StructGet and StructSet in
-  // CodeScanner.
-  if (inclusion == TypeInclusion::UsedIRTypes) {
-    auto it = info.info.begin();
-    while (it != info.info.end()) {
-      if (it->second.useCount == 0) {
-        auto deleted = it++;
-        info.info.erase(deleted);
-      } else {
-        ++it;
-      }
-    }
-  }
-
   // Recursively traverse each reference type, which may have a child type that
   // is itself a reference type. This reflects an appearance in the binary
   // format that is in the type section itself. As we do this we may find more
diff --git a/src/wasm/wasm.cpp b/src/wasm/wasm.cpp
index 98146dfbcc3..87ae6ac5a38 100644
--- a/src/wasm/wasm.cpp
+++ b/src/wasm/wasm.cpp
@@ -1008,7 +1008,25 @@ void CallRef::finalize() {
     return;
   }
   assert(target->type.isRef());
-  if (target->type.getHeapType().isBottom()) {
+  if (target->type.isNull()) {
+    // If this call_ref has been optimized to have a null reference, then it
+    // will definitely trap. We could update the type to be unreachable, but
+    // that would violate the invariant that non-branch instructions other than
+    // `unreachable` can only be unreachable if they have unreachable children.
+    // Make the result type as close to `unreachable` as possible without
+    // actually making it unreachable. TODO: consider just making this
+    // unreachable instead (and similar in other GC accessors), although this
+    // would currently cause the parser to admit more invalid modules.
+    if (type.isRef()) {
+      type = Type(type.getHeapType().getBottom(), NonNullable);
+    } else if (type.isTuple()) {
+      Tuple elems;
+      for (auto t : type) {
+        elems.push_back(
+          t.isRef() ? Type(t.getHeapType().getBottom(), NonNullable) : t);
+      }
+      type = Type(elems);
+    }
     return;
   }
   assert(target->type.getHeapType().isSignature());
@@ -1136,7 +1154,12 @@ void StructNew::finalize() {
 void StructGet::finalize() {
   if (ref->type == Type::unreachable) {
     type = Type::unreachable;
-  } else if (!ref->type.isNull()) {
+  } else if (ref->type.isNull()) {
+    // See comment on CallRef for explanation.
+    if (type.isRef()) {
+      type = Type(type.getHeapType().getBottom(), NonNullable);
+    }
+  } else {
     type = ref->type.getHeapType().getStruct().fields[index].type;
   }
 }
@@ -1180,7 +1203,12 @@ void ArrayNewFixed::finalize() {
 void ArrayGet::finalize() {
   if (ref->type == Type::unreachable || index->type == Type::unreachable) {
     type = Type::unreachable;
-  } else if (!ref->type.isNull()) {
+  } else if (ref->type.isNull()) {
+    // See comment on CallRef for explanation.
+    if (type.isRef()) {
+      type = Type(type.getHeapType().getBottom(), NonNullable);
+    }
+  } else {
     type = ref->type.getHeapType().getArray().element.type;
   }
 }
diff --git a/test/lit/passes/local-subtyping.wast b/test/lit/passes/local-subtyping.wast
index d5fc3d63d71..04890521dea 100644
--- a/test/lit/passes/local-subtyping.wast
+++ b/test/lit/passes/local-subtyping.wast
@@ -274,7 +274,7 @@
 
   ;; CHECK:      (func $multiple-iterations-refinalize-call-ref-bottom (type $0)
   ;; CHECK-NEXT:  (local $f nullfuncref)
-  ;; CHECK-NEXT:  (local $x anyref)
+  ;; CHECK-NEXT:  (local $x (ref none))
   ;; CHECK-NEXT:  (local.set $f
   ;; CHECK-NEXT:   (ref.null nofunc)
   ;; CHECK-NEXT:  )
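
Note (a minimal sketch for illustration, not part of the patch; the names $sig and $test are invented): per the wasm spec, the call_ref below has the declared result type anyref, but after this change Binaryen's IR types the expression as (ref none), the non-nullable bottom of anyref, because a null nofunc target means the call must trap:

(module
  (type $sig (func (result anyref)))
  (func $test (result anyref)
    ;; The target is a known null, so CallRef::finalize narrows the
    ;; expression's IR type from anyref to (ref none) without making
    ;; it unreachable.
    (call_ref $sig
      (ref.null nofunc))))

This matches the local-subtyping test update above, where a local holding such a call_ref result is refined from anyref to (ref none).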