From 97965c7c0a3e53ea92e7a51892b94acf12472268 Mon Sep 17 00:00:00 2001
From: vegorov-rbx <75688451+vegorov-rbx@users.noreply.github.com>
Date: Fri, 12 May 2023 20:50:47 +0300
Subject: [PATCH] Sync to upstream/release/576 (#928)

* `ClassType` can now have an indexer defined on it. This allows custom types to be used in `t[x]` expressions.
* Fixed the search for the closest executable breakpoint line. Previously, breakpoints might have been skipped in `else` blocks at the end of a function.
* Fixed how unification is performed for two optional types `a? <: b?`; previously, it might have unified either 'a' or 'b' with 'nil'. Note that this fix is not enabled by default yet (see the list in `ExperimentalFlags.h`).

In the new type solver, a concept of 'Type Families' has been introduced. Type families can be thought of as type aliases with custom type inference/reduction logic attached to them. For example, we can have an `Add<T, U>` type family that resolves to the type that is the result of adding two values together. This will help type inference figure out what 'T' and 'U' might be when explicit type annotations are not provided. This update doesn't define any type families yet, but they will be added in the near future. It is also possible for Luau embedders to define their own type families in the global/environment scope.

Other changes include:

* Fixed the scope used to find out which generic types should be included in a function's generic type list.
* Fixed a crash after cyclic bound types were created during unification.

And in native code generation (JIT):

* Use of the arm64 target on M1 now requires macOS 13.
* Entry into native code has been optimized. This is especially important for coroutine call/pcall performance, as those calls involve going through a C call frame.
* LOP_LOADK(X) translation into IR has been improved to enable type tag/constant propagation.
* arm64 can now use integer immediate values to synthesize floating-point values.
* The x64 assembler removes duplicate 64-bit numbers from the data section to save space.
* Linux `perf` can now be used to profile native Luau code (when running with the `--codegen-perf` CLI argument).
---
 Analysis/include/Luau/Constraint.h | 23 +-
 Analysis/include/Luau/ConstraintSolver.h | 2 +
 Analysis/include/Luau/Error.h | 27 +-
 Analysis/include/Luau/Quantify.h | 28 +-
 Analysis/include/Luau/TxnLog.h | 13 +-
 Analysis/include/Luau/Type.h | 38 ++-
 Analysis/include/Luau/TypeFamily.h | 115 ++++++++
 Analysis/include/Luau/TypePack.h | 17 +-
 Analysis/include/Luau/Unifier.h | 11 +-
 Analysis/include/Luau/VisitType.h | 39 +++
 Analysis/src/Clone.cpp | 63 ++++-
 Analysis/src/ConstraintGraphBuilder.cpp | 3 +-
 Analysis/src/ConstraintSolver.cpp | 147 +++++++---
 Analysis/src/Error.cpp | 24 ++
 Analysis/src/IostreamHelpers.cpp | 4 +
 Analysis/src/Quantify.cpp | 30 +-
 Analysis/src/Substitution.cpp | 72 ++++-
 Analysis/src/ToDot.cpp | 9 +
 Analysis/src/ToString.cpp | 119 ++++++--
 Analysis/src/TxnLog.cpp | 118 +++++++-
 Analysis/src/TypeAttach.cpp | 10 +
 Analysis/src/TypeChecker2.cpp | 47 +++-
 Analysis/src/TypeFamily.cpp | 310 +++++++++++++++++++++
 Analysis/src/TypeInfer.cpp | 100 ++++++-
 Analysis/src/Unifier.cpp | 214 +++++++++++++-
 CLI/Repl.cpp | 28 ++
 CodeGen/include/Luau/AssemblyBuilderA64.h | 4 +-
 CodeGen/include/Luau/AssemblyBuilderX64.h | 3 +
 CodeGen/include/Luau/CodeBlockUnwind.h | 2 +
 CodeGen/include/Luau/CodeGen.h | 10 +-
 CodeGen/src/AssemblyBuilderA64.cpp | 53 ++--
 CodeGen/src/AssemblyBuilderX64.cpp | 33 ++-
 CodeGen/src/CodeBlockUnwind.cpp | 28 +-
 CodeGen/src/CodeGen.cpp | 88 ++++--
CodeGen/src/EmitCommonX64.cpp | 2 +- CodeGen/src/IrLoweringA64.cpp | 26 +- CodeGen/src/IrTranslation.cpp | 39 ++- CodeGen/src/NativeState.h | 2 + Common/include/Luau/ExperimentalFlags.h | 2 + Sources.cmake | 4 + VM/src/ldebug.cpp | 38 ++- VM/src/lvmexecute.cpp | 10 +- bench/tests/matrixmult.lua | 39 +++ bench/tests/mesh-normal-scalar.lua | 254 +++++++++++++++++ tests/AssemblyBuilderA64.test.cpp | 3 +- tests/AssemblyBuilderX64.test.cpp | 25 +- tests/ClassFixture.cpp | 13 + tests/CodeAllocator.test.cpp | 4 + tests/Conformance.test.cpp | 13 +- tests/TxnLog.test.cpp | 113 ++++++++ tests/TypeFamily.test.cpp | 205 ++++++++++++++ tests/TypeInfer.classes.test.cpp | 146 ++++++++++ tests/TypeInfer.functions.test.cpp | 36 +++ tests/TypeInfer.intersectionTypes.test.cpp | 18 +- tests/TypeInfer.operators.test.cpp | 28 +- tests/TypeInfer.provisional.test.cpp | 35 --- tests/TypeInfer.singletons.test.cpp | 6 +- tests/TypeInfer.test.cpp | 65 +++++ tests/TypeInfer.tryUnify.test.cpp | 87 ++++++ tests/TypeInfer.unionTypes.test.cpp | 78 ++++++ tests/conformance/debugger.lua | 13 + tools/faillist.txt | 9 +- 62 files changed, 2869 insertions(+), 276 deletions(-) create mode 100644 Analysis/include/Luau/TypeFamily.h create mode 100644 Analysis/src/TypeFamily.cpp create mode 100644 bench/tests/matrixmult.lua create mode 100644 bench/tests/mesh-normal-scalar.lua create mode 100644 tests/TxnLog.test.cpp create mode 100644 tests/TypeFamily.test.cpp diff --git a/Analysis/include/Luau/Constraint.h b/Analysis/include/Luau/Constraint.h index c7bc58b..3aa3c86 100644 --- a/Analysis/include/Luau/Constraint.h +++ b/Analysis/include/Luau/Constraint.h @@ -198,9 +198,26 @@ struct UnpackConstraint TypePackId sourcePack; }; -using ConstraintV = Variant; +// ty ~ reduce ty +// +// Try to reduce ty, if it is a TypeFamilyInstanceType. Otherwise, do nothing. +struct ReduceConstraint +{ + TypeId ty; +}; + +// tp ~ reduce tp +// +// Analogous to ReduceConstraint, but for type packs. +struct ReducePackConstraint +{ + TypePackId tp; +}; + +using ConstraintV = + Variant; struct Constraint { diff --git a/Analysis/include/Luau/ConstraintSolver.h b/Analysis/include/Luau/ConstraintSolver.h index 6888e99..f6b1aed 100644 --- a/Analysis/include/Luau/ConstraintSolver.h +++ b/Analysis/include/Luau/ConstraintSolver.h @@ -121,6 +121,8 @@ struct ConstraintSolver bool tryDispatch(const SetIndexerConstraint& c, NotNull constraint, bool force); bool tryDispatch(const SingletonOrTopTypeConstraint& c, NotNull constraint); bool tryDispatch(const UnpackConstraint& c, NotNull constraint); + bool tryDispatch(const ReduceConstraint& c, NotNull constraint, bool force); + bool tryDispatch(const ReducePackConstraint& c, NotNull constraint, bool force); // for a, ... 
in some_table do // also handles __iter metamethod diff --git a/Analysis/include/Luau/Error.h b/Analysis/include/Luau/Error.h index 8571430..6264a0b 100644 --- a/Analysis/include/Luau/Error.h +++ b/Analysis/include/Luau/Error.h @@ -329,12 +329,27 @@ struct DynamicPropertyLookupOnClassesUnsafe bool operator==(const DynamicPropertyLookupOnClassesUnsafe& rhs) const; }; -using TypeErrorData = Variant; +struct UninhabitedTypeFamily +{ + TypeId ty; + + bool operator==(const UninhabitedTypeFamily& rhs) const; +}; + +struct UninhabitedTypePackFamily +{ + TypePackId tp; + + bool operator==(const UninhabitedTypePackFamily& rhs) const; +}; + +using TypeErrorData = + Variant; struct TypeErrorSummary { diff --git a/Analysis/include/Luau/Quantify.h b/Analysis/include/Luau/Quantify.h index c86512f..b562c54 100644 --- a/Analysis/include/Luau/Quantify.h +++ b/Analysis/include/Luau/Quantify.h @@ -2,6 +2,9 @@ #pragma once #include "Luau/Type.h" +#include "Luau/DenseHash.h" + +#include namespace Luau { @@ -10,6 +13,29 @@ struct TypeArena; struct Scope; void quantify(TypeId ty, TypeLevel level); -std::optional quantify(TypeArena* arena, TypeId ty, Scope* scope); + +// TODO: This is eerily similar to the pattern that NormalizedClassType +// implements. We could, and perhaps should, merge them together. +template +struct OrderedMap +{ + std::vector keys; + DenseHashMap pairings{nullptr}; + + void push(K k, V v) + { + keys.push_back(k); + pairings[k] = v; + } +}; + +struct QuantifierResult +{ + TypeId result; + OrderedMap insertedGenerics; + OrderedMap insertedGenericPacks; +}; + +std::optional quantify(TypeArena* arena, TypeId ty, Scope* scope); } // namespace Luau diff --git a/Analysis/include/Luau/TxnLog.h b/Analysis/include/Luau/TxnLog.h index 0ed8a49..907908d 100644 --- a/Analysis/include/Luau/TxnLog.h +++ b/Analysis/include/Luau/TxnLog.h @@ -19,6 +19,10 @@ struct PendingType // The pending Type state. Type pending; + // On very rare occasions, we need to delete an entry from the TxnLog. + // DenseHashMap does not afford that so we note its deadness here. + bool dead = false; + explicit PendingType(Type state) : pending(std::move(state)) { @@ -61,10 +65,11 @@ T* getMutable(PendingTypePack* pending) // Log of what TypeIds we are rebinding, to be committed later. struct TxnLog { - TxnLog() + explicit TxnLog(bool useScopes = false) : typeVarChanges(nullptr) , typePackChanges(nullptr) , ownedSeen() + , useScopes(useScopes) , sharedSeen(&ownedSeen) { } @@ -297,6 +302,12 @@ private: void popSeen(TypeOrPackId lhs, TypeOrPackId rhs); public: + // There is one spot in the code where TxnLog has to reconcile collisions + // between parallel logs. In that codepath, we have to work out which of two + // FreeTypes subsumes the other. If useScopes is false, the TypeLevel is + // used. Else we use the embedded Scope*. + bool useScopes = false; + // Used to avoid infinite recursion when types are cyclic. // Shared with all the descendent TxnLogs. 
std::vector>* sharedSeen; diff --git a/Analysis/include/Luau/Type.h b/Analysis/include/Luau/Type.h index c615b8f..80a044c 100644 --- a/Analysis/include/Luau/Type.h +++ b/Analysis/include/Luau/Type.h @@ -23,6 +23,7 @@ LUAU_FASTINT(LuauTableTypeMaximumStringifierLength) LUAU_FASTINT(LuauTypeMaximumStringifierLength) +LUAU_FASTFLAG(LuauTypecheckClassTypeIndexers) namespace Luau { @@ -31,6 +32,8 @@ struct TypeArena; struct Scope; using ScopePtr = std::shared_ptr; +struct TypeFamily; + /** * There are three kinds of type variables: * - `Free` variables are metavariables, which stand for unconstrained types. @@ -489,6 +492,7 @@ struct ClassType Tags tags; std::shared_ptr userData; ModuleName definitionModuleName; + std::optional indexer; ClassType(Name name, Props props, std::optional parent, std::optional metatable, Tags tags, std::shared_ptr userData, ModuleName definitionModuleName) @@ -501,6 +505,35 @@ struct ClassType , definitionModuleName(definitionModuleName) { } + + ClassType(Name name, Props props, std::optional parent, std::optional metatable, Tags tags, + std::shared_ptr userData, ModuleName definitionModuleName, std::optional indexer) + : name(name) + , props(props) + , parent(parent) + , metatable(metatable) + , tags(tags) + , userData(userData) + , definitionModuleName(definitionModuleName) + , indexer(indexer) + { + LUAU_ASSERT(FFlag::LuauTypecheckClassTypeIndexers); + } +}; + +/** + * An instance of a type family that has not yet been reduced to a more concrete + * type. The constraint solver receives a constraint to reduce each + * TypeFamilyInstanceType to a concrete type. A design detail is important to + * note here: the parameters for this instantiation of the type family are + * contained within this type, so that they can be substituted. + */ +struct TypeFamilyInstanceType +{ + NotNull family; + + std::vector typeArguments; + std::vector packArguments; }; struct TypeFun @@ -640,8 +673,9 @@ struct NegationType using ErrorType = Unifiable::Error; -using TypeVariant = Unifiable::Variant; +using TypeVariant = + Unifiable::Variant; struct Type final { diff --git a/Analysis/include/Luau/TypeFamily.h b/Analysis/include/Luau/TypeFamily.h new file mode 100644 index 0000000..4c04f52 --- /dev/null +++ b/Analysis/include/Luau/TypeFamily.h @@ -0,0 +1,115 @@ +// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details +#pragma once + +#include "Luau/Error.h" +#include "Luau/NotNull.h" +#include "Luau/Variant.h" + +#include +#include +#include + +namespace Luau +{ + +struct Type; +using TypeId = const Type*; + +struct TypePackVar; +using TypePackId = const TypePackVar*; + +struct TypeArena; +struct BuiltinTypes; +struct TxnLog; + +/// Represents a reduction result, which may have successfully reduced the type, +/// may have concretely failed to reduce the type, or may simply be stuck +/// without more information. +template +struct TypeFamilyReductionResult +{ + /// The result of the reduction, if any. If this is nullopt, the family + /// could not be reduced. + std::optional result; + /// Whether the result is uninhabited: whether we know, unambiguously and + /// permanently, whether this type family reduction results in an + /// uninhabitable type. This will trigger an error to be reported. + bool uninhabited; + /// Any types that need to be progressed or mutated before the reduction may + /// proceed. + std::vector blockedTypes; + /// Any type packs that need to be progressed or mutated before the + /// reduction may proceed. 
+ std::vector blockedPacks; +}; + +/// Represents a type function that may be applied to map a series of types and +/// type packs to a single output type. +struct TypeFamily +{ + /// The human-readable name of the type family. Used to stringify instance + /// types. + std::string name; + + /// The reducer function for the type family. + std::function( + std::vector, std::vector, NotNull, NotNull, NotNull log)> + reducer; +}; + +/// Represents a type function that may be applied to map a series of types and +/// type packs to a single output type pack. +struct TypePackFamily +{ + /// The human-readable name of the type pack family. Used to stringify + /// instance packs. + std::string name; + + /// The reducer function for the type pack family. + std::function( + std::vector, std::vector, NotNull, NotNull, NotNull log)> + reducer; +}; + +struct FamilyGraphReductionResult +{ + ErrorVec errors; + DenseHashSet blockedTypes{nullptr}; + DenseHashSet blockedPacks{nullptr}; + DenseHashSet reducedTypes{nullptr}; + DenseHashSet reducedPacks{nullptr}; +}; + +/** + * Attempt to reduce all instances of any type or type pack family in the type + * graph provided. + * + * @param entrypoint the entry point to the type graph. + * @param location the location the reduction is occurring at; used to populate + * type errors. + * @param arena an arena to allocate types into. + * @param builtins the built-in types. + * @param log a TxnLog to use. If one is provided, substitution will take place + * against the TxnLog, otherwise substitutions will directly mutate the type + * graph. Do not provide the empty TxnLog, as a result. + */ +FamilyGraphReductionResult reduceFamilies( + TypeId entrypoint, Location location, NotNull arena, NotNull builtins, TxnLog* log = nullptr, bool force = false); + +/** + * Attempt to reduce all instances of any type or type pack family in the type + * graph provided. + * + * @param entrypoint the entry point to the type graph. + * @param location the location the reduction is occurring at; used to populate + * type errors. + * @param arena an arena to allocate types into. + * @param builtins the built-in types. + * @param log a TxnLog to use. If one is provided, substitution will take place + * against the TxnLog, otherwise substitutions will directly mutate the type + * graph. Do not provide the empty TxnLog, as a result. + */ +FamilyGraphReductionResult reduceFamilies( + TypePackId entrypoint, Location location, NotNull arena, NotNull builtins, TxnLog* log = nullptr, bool force = false); + +} // namespace Luau diff --git a/Analysis/include/Luau/TypePack.h b/Analysis/include/Luau/TypePack.h index e78a66b..d159aa4 100644 --- a/Analysis/include/Luau/TypePack.h +++ b/Analysis/include/Luau/TypePack.h @@ -12,11 +12,13 @@ namespace Luau { struct TypeArena; +struct TypePackFamily; struct TxnLog; struct TypePack; struct VariadicTypePack; struct BlockedTypePack; +struct TypeFamilyInstanceTypePack; struct TypePackVar; using TypePackId = const TypePackVar*; @@ -50,10 +52,10 @@ struct GenericTypePack }; using BoundTypePack = Unifiable::Bound; - using ErrorTypePack = Unifiable::Error; -using TypePackVariant = Unifiable::Variant; +using TypePackVariant = + Unifiable::Variant; /* A TypePack is a rope-like string of TypeIds. We use this structure to encode * notions like packs of unknown length and packs of any length, as well as more @@ -83,6 +85,17 @@ struct BlockedTypePack static size_t nextIndex; }; +/** + * Analogous to a TypeFamilyInstanceType. 
+ */ +struct TypeFamilyInstanceTypePack +{ + NotNull family; + + std::vector typeArguments; + std::vector packArguments; +}; + struct TypePackVar { explicit TypePackVar(const TypePackVariant& ty); diff --git a/Analysis/include/Luau/Unifier.h b/Analysis/include/Luau/Unifier.h index 742f029..d5db06c 100644 --- a/Analysis/include/Luau/Unifier.h +++ b/Analysis/include/Luau/Unifier.h @@ -64,9 +64,11 @@ struct Unifier Variance variance = Covariant; bool normalize = true; // Normalize unions and intersections if necessary bool checkInhabited = true; // Normalize types to check if they are inhabited - bool useScopes = false; // If true, we use the scope hierarchy rather than TypeLevels CountMismatch::Context ctx = CountMismatch::Arg; + // If true, generics act as free types when unifying. + bool hideousFixMeGenericsAreActuallyFree = false; + UnifierSharedState& sharedState; // When the Unifier is forced to unify two blocked types (or packs), they @@ -78,6 +80,10 @@ struct Unifier Unifier( NotNull normalizer, Mode mode, NotNull scope, const Location& location, Variance variance, TxnLog* parentLog = nullptr); + // Configure the Unifier to test for scope subsumption via embedded Scope + // pointers rather than TypeLevels. + void enableScopeTests(); + // Test whether the two type vars unify. Never commits the result. ErrorVec canUnify(TypeId subTy, TypeId superTy); ErrorVec canUnify(TypePackId subTy, TypePackId superTy, bool isFunctionCall = false); @@ -159,6 +165,9 @@ private: // Available after regular type pack unification errors std::optional firstPackErrorPos; + + // If true, we use the scope hierarchy rather than TypeLevels + bool useScopes = false; }; void promoteTypeLevels(TxnLog& log, const TypeArena* arena, TypeLevel minLevel, Scope* outerScope, bool useScope, TypePackId tp); diff --git a/Analysis/include/Luau/VisitType.h b/Analysis/include/Luau/VisitType.h index 663627d..b6dcf1f 100644 --- a/Analysis/include/Luau/VisitType.h +++ b/Analysis/include/Luau/VisitType.h @@ -159,6 +159,10 @@ struct GenericTypeVisitor { return visit(ty); } + virtual bool visit(TypeId ty, const TypeFamilyInstanceType& tfit) + { + return visit(ty); + } virtual bool visit(TypePackId tp) { @@ -192,6 +196,10 @@ struct GenericTypeVisitor { return visit(tp); } + virtual bool visit(TypePackId tp, const TypeFamilyInstanceTypePack& tfitp) + { + return visit(tp); + } void traverse(TypeId ty) { @@ -272,6 +280,15 @@ struct GenericTypeVisitor if (ctv->metatable) traverse(*ctv->metatable); + + if (FFlag::LuauTypecheckClassTypeIndexers) + { + if (ctv->indexer) + { + traverse(ctv->indexer->indexType); + traverse(ctv->indexer->indexResultType); + } + } } } else if (auto atv = get(ty)) @@ -327,6 +344,17 @@ struct GenericTypeVisitor if (visit(ty, *ntv)) traverse(ntv->ty); } + else if (auto tfit = get(ty)) + { + if (visit(ty, *tfit)) + { + for (TypeId p : tfit->typeArguments) + traverse(p); + + for (TypePackId p : tfit->packArguments) + traverse(p); + } + } else LUAU_ASSERT(!"GenericTypeVisitor::traverse(TypeId) is not exhaustive!"); @@ -376,6 +404,17 @@ struct GenericTypeVisitor } else if (auto btp = get(tp)) visit(tp, *btp); + else if (auto tfitp = get(tp)) + { + if (visit(tp, *tfitp)) + { + for (TypeId t : tfitp->typeArguments) + traverse(t); + + for (TypePackId t : tfitp->packArguments) + traverse(t); + } + } else LUAU_ASSERT(!"GenericTypeVisitor::traverse(TypePackId) is not exhaustive!"); diff --git a/Analysis/src/Clone.cpp b/Analysis/src/Clone.cpp index 450b84a..0c1b24a 100644 --- a/Analysis/src/Clone.cpp +++ 
b/Analysis/src/Clone.cpp @@ -52,6 +52,12 @@ Property clone(const Property& prop, TypeArena& dest, CloneState& cloneState) } } +static TableIndexer clone(const TableIndexer& indexer, TypeArena& dest, CloneState& cloneState) +{ + LUAU_ASSERT(FFlag::LuauTypecheckClassTypeIndexers); + return TableIndexer{clone(indexer.indexType, dest, cloneState), clone(indexer.indexResultType, dest, cloneState)}; +} + struct TypePackCloner; /* @@ -98,6 +104,7 @@ struct TypeCloner void operator()(const UnknownType& t); void operator()(const NeverType& t); void operator()(const NegationType& t); + void operator()(const TypeFamilyInstanceType& t); }; struct TypePackCloner @@ -171,6 +178,22 @@ struct TypePackCloner if (t.tail) destTp->tail = clone(*t.tail, dest, cloneState); } + + void operator()(const TypeFamilyInstanceTypePack& t) + { + TypePackId cloned = dest.addTypePack(TypeFamilyInstanceTypePack{t.family, {}, {}}); + TypeFamilyInstanceTypePack* destTp = getMutable(cloned); + LUAU_ASSERT(destTp); + seenTypePacks[typePackId] = cloned; + + destTp->typeArguments.reserve(t.typeArguments.size()); + for (TypeId ty : t.typeArguments) + destTp->typeArguments.push_back(clone(ty, dest, cloneState)); + + destTp->packArguments.reserve(t.packArguments.size()); + for (TypePackId tp : t.packArguments) + destTp->packArguments.push_back(clone(tp, dest, cloneState)); + } }; template @@ -288,8 +311,16 @@ void TypeCloner::operator()(const TableType& t) for (const auto& [name, prop] : t.props) ttv->props[name] = clone(prop, dest, cloneState); - if (t.indexer) - ttv->indexer = TableIndexer{clone(t.indexer->indexType, dest, cloneState), clone(t.indexer->indexResultType, dest, cloneState)}; + if (FFlag::LuauTypecheckClassTypeIndexers) + { + if (t.indexer) + ttv->indexer = clone(*t.indexer, dest, cloneState); + } + else + { + if (t.indexer) + ttv->indexer = TableIndexer{clone(t.indexer->indexType, dest, cloneState), clone(t.indexer->indexResultType, dest, cloneState)}; + } for (TypeId& arg : ttv->instantiatedTypeParams) arg = clone(arg, dest, cloneState); @@ -327,6 +358,12 @@ void TypeCloner::operator()(const ClassType& t) if (t.metatable) ctv->metatable = clone(*t.metatable, dest, cloneState); + + if (FFlag::LuauTypecheckClassTypeIndexers) + { + if (t.indexer) + ctv->indexer = clone(*t.indexer, dest, cloneState); + } } void TypeCloner::operator()(const AnyType& t) @@ -389,6 +426,28 @@ void TypeCloner::operator()(const NegationType& t) asMutable(result)->ty = NegationType{ty}; } +void TypeCloner::operator()(const TypeFamilyInstanceType& t) +{ + TypeId result = dest.addType(TypeFamilyInstanceType{ + t.family, + {}, + {}, + }); + + seenTypes[typeId] = result; + + TypeFamilyInstanceType* tfit = getMutable(result); + LUAU_ASSERT(tfit != nullptr); + + tfit->typeArguments.reserve(t.typeArguments.size()); + for (TypeId p : t.typeArguments) + tfit->typeArguments.push_back(clone(p, dest, cloneState)); + + tfit->packArguments.reserve(t.packArguments.size()); + for (TypePackId p : t.packArguments) + tfit->packArguments.push_back(clone(p, dest, cloneState)); +} + } // anonymous namespace TypePackId clone(TypePackId tp, TypeArena& dest, CloneState& cloneState) diff --git a/Analysis/src/ConstraintGraphBuilder.cpp b/Analysis/src/ConstraintGraphBuilder.cpp index e07fe70..c8d99ad 100644 --- a/Analysis/src/ConstraintGraphBuilder.cpp +++ b/Analysis/src/ConstraintGraphBuilder.cpp @@ -728,6 +728,7 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocalFun }); addConstraint(scope, std::move(c)); + module->astTypes[function->func] 
= functionType; return ControlFlow::None; } @@ -1475,7 +1476,7 @@ Inference ConstraintGraphBuilder::check( Checkpoint endCheckpoint = checkpoint(this); TypeId generalizedTy = arena->addType(BlockedType{}); - NotNull gc = addConstraint(scope, expr->location, GeneralizationConstraint{generalizedTy, sig.signature}); + NotNull gc = addConstraint(sig.signatureScope, expr->location, GeneralizationConstraint{generalizedTy, sig.signature}); forEachConstraint(startCheckpoint, endCheckpoint, this, [gc](const ConstraintPtr& constraint) { gc->dependencies.emplace_back(constraint.get()); diff --git a/Analysis/src/ConstraintSolver.cpp b/Analysis/src/ConstraintSolver.cpp index f1f868a..488fd4b 100644 --- a/Analysis/src/ConstraintSolver.cpp +++ b/Analysis/src/ConstraintSolver.cpp @@ -16,6 +16,7 @@ #include "Luau/Type.h" #include "Luau/Unifier.h" #include "Luau/VisitType.h" +#include "Luau/TypeFamily.h" LUAU_FASTFLAGVARIABLE(DebugLuauLogSolver, false); LUAU_FASTFLAG(LuauRequirePathTrueModuleName) @@ -226,6 +227,32 @@ void dump(ConstraintSolver* cs, ToStringOptions& opts) } } +struct InstantiationQueuer : TypeOnceVisitor +{ + ConstraintSolver* solver; + NotNull scope; + Location location; + + explicit InstantiationQueuer(NotNull scope, const Location& location, ConstraintSolver* solver) + : solver(solver) + , scope(scope) + , location(location) + { + } + + bool visit(TypeId ty, const PendingExpansionType& petv) override + { + solver->pushConstraint(scope, location, TypeAliasExpansionConstraint{ty}); + return false; + } + + bool visit(TypeId ty, const TypeFamilyInstanceType& tfit) override + { + solver->pushConstraint(scope, location, ReduceConstraint{ty}); + return true; + } +}; + ConstraintSolver::ConstraintSolver(NotNull normalizer, NotNull rootScope, std::vector> constraints, ModuleName moduleName, NotNull moduleResolver, std::vector requireCycles, DcrLogger* logger) : arena(normalizer->arena) @@ -441,6 +468,10 @@ bool ConstraintSolver::tryDispatch(NotNull constraint, bool fo success = tryDispatch(*sottc, constraint); else if (auto uc = get(*constraint)) success = tryDispatch(*uc, constraint); + else if (auto rc = get(*constraint)) + success = tryDispatch(*rc, constraint, force); + else if (auto rpc = get(*constraint)) + success = tryDispatch(*rpc, constraint, force); else LUAU_ASSERT(false); @@ -479,13 +510,19 @@ bool ConstraintSolver::tryDispatch(const GeneralizationConstraint& c, NotNull(generalizedType)) return block(generalizedType, constraint); - std::optional generalized = quantify(arena, c.sourceType, constraint->scope); + std::optional generalized = quantify(arena, c.sourceType, constraint->scope); if (generalized) { if (get(generalizedType)) - asMutable(generalizedType)->ty.emplace(*generalized); + asMutable(generalizedType)->ty.emplace(generalized->result); else - unify(generalizedType, *generalized, constraint->scope); + unify(generalizedType, generalized->result, constraint->scope); + + for (auto [free, gen] : generalized->insertedGenerics.pairings) + unify(free, gen, constraint->scope); + + for (auto [free, gen] : generalized->insertedGenericPacks.pairings) + unify(free, gen, constraint->scope); } else { @@ -504,6 +541,9 @@ bool ConstraintSolver::tryDispatch(const InstantiationConstraint& c, NotNullscope); std::optional instantiated = inst.substitute(c.superType); @@ -512,6 +552,9 @@ bool ConstraintSolver::tryDispatch(const InstantiationConstraint& c, NotNull(c.subType)); asMutable(c.subType)->ty.emplace(*instantiated); + InstantiationQueuer queuer{constraint->scope, constraint->location, 
this}; + queuer.traverse(c.subType); + unblock(c.subType); return true; @@ -953,26 +996,6 @@ struct InfiniteTypeFinder : TypeOnceVisitor } }; -struct InstantiationQueuer : TypeOnceVisitor -{ - ConstraintSolver* solver; - NotNull scope; - Location location; - - explicit InstantiationQueuer(NotNull scope, const Location& location, ConstraintSolver* solver) - : solver(solver) - , scope(scope) - , location(location) - { - } - - bool visit(TypeId ty, const PendingExpansionType& petv) override - { - solver->pushConstraint(scope, location, TypeAliasExpansionConstraint{ty}); - return false; - } -}; - bool ConstraintSolver::tryDispatch(const TypeAliasExpansionConstraint& c, NotNull constraint) { const PendingExpansionType* petv = get(follow(c.target)); @@ -1246,7 +1269,7 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNullscope, Location{}, Covariant}; - u.useScopes = true; + u.enableScopeTests(); u.tryUnify(*instantiated, inferredTy, /* isFunctionCall */ true); @@ -1278,8 +1301,12 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNullscope, constraint->location, this}; + queuer.traverse(fn); + queuer.traverse(inferredTy); + return true; } } @@ -1295,7 +1322,7 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNullscope, Location{}, Covariant}; - u.useScopes = true; + u.enableScopeTests(); u.tryUnify(inferredTy, builtinTypes->anyType); u.tryUnify(fn, builtinTypes->anyType); @@ -1305,8 +1332,12 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNullscope, constraint->location, this}; + queuer.traverse(fn); + queuer.traverse(inferredTy); + return true; } @@ -1567,8 +1598,11 @@ bool ConstraintSolver::tryDispatch(const SetIndexerConstraint& c, NotNullstate == TableState::Free || tt->state == TableState::Unsealed) { + TypeId promotedIndexTy = arena->freshType(tt->scope); + unify(c.indexType, promotedIndexTy, constraint->scope); + auto mtt = getMutable(subjectType); - mtt->indexer = TableIndexer{c.indexType, c.propType}; + mtt->indexer = TableIndexer{promotedIndexTy, c.propType}; asMutable(c.propType)->ty.emplace(tt->scope); asMutable(c.resultType)->ty.emplace(subjectType); unblock(c.propType); @@ -1666,6 +1700,52 @@ bool ConstraintSolver::tryDispatch(const UnpackConstraint& c, NotNull constraint, bool force) +{ + TypeId ty = follow(c.ty); + FamilyGraphReductionResult result = reduceFamilies(ty, constraint->location, NotNull{arena}, builtinTypes, nullptr, force); + + for (TypeId r : result.reducedTypes) + unblock(r); + + for (TypePackId r : result.reducedPacks) + unblock(r); + + if (force) + return true; + + for (TypeId b : result.blockedTypes) + block(b, constraint); + + for (TypePackId b : result.blockedPacks) + block(b, constraint); + + return result.blockedTypes.empty() && result.blockedPacks.empty(); +} + +bool ConstraintSolver::tryDispatch(const ReducePackConstraint& c, NotNull constraint, bool force) +{ + TypePackId tp = follow(c.tp); + FamilyGraphReductionResult result = reduceFamilies(tp, constraint->location, NotNull{arena}, builtinTypes, nullptr, force); + + for (TypeId r : result.reducedTypes) + unblock(r); + + for (TypePackId r : result.reducedPacks) + unblock(r); + + if (force) + return true; + + for (TypeId b : result.blockedTypes) + block(b, constraint); + + for (TypePackId b : result.blockedPacks) + block(b, constraint); + + return result.blockedTypes.empty() && result.blockedPacks.empty(); +} + bool ConstraintSolver::tryDispatchIterableTable(TypeId iteratorTy, const IterableConstraint& c, 
NotNull constraint, bool force) { auto block_ = [&](auto&& t) { @@ -2031,7 +2111,7 @@ template bool ConstraintSolver::tryUnify(NotNull constraint, TID subTy, TID superTy) { Unifier u{normalizer, Mode::Strict, constraint->scope, constraint->location, Covariant}; - u.useScopes = true; + u.enableScopeTests(); u.tryUnify(subTy, superTy); @@ -2195,10 +2275,11 @@ void ConstraintSolver::unblock(NotNull progressed) return unblock_(progressed.get()); } -void ConstraintSolver::unblock(TypeId progressed) +void ConstraintSolver::unblock(TypeId ty) { DenseHashSet seen{nullptr}; + TypeId progressed = ty; while (true) { if (seen.find(progressed)) @@ -2256,7 +2337,7 @@ bool ConstraintSolver::isBlocked(NotNull constraint) void ConstraintSolver::unify(TypeId subType, TypeId superType, NotNull scope) { Unifier u{normalizer, Mode::Strict, scope, Location{}, Covariant}; - u.useScopes = true; + u.enableScopeTests(); u.tryUnify(subType, superType); @@ -2279,7 +2360,7 @@ void ConstraintSolver::unify(TypePackId subPack, TypePackId superPack, NotNull scope, if (unifyFreeTypes && (get(a) || get(b))) { Unifier u{normalizer, Mode::Strict, scope, Location{}, Covariant}; - u.useScopes = true; + u.enableScopeTests(); u.tryUnify(b, a); if (u.errors.empty()) diff --git a/Analysis/src/Error.cpp b/Analysis/src/Error.cpp index 1e03797..4f70be3 100644 --- a/Analysis/src/Error.cpp +++ b/Analysis/src/Error.cpp @@ -484,6 +484,16 @@ struct ErrorConverter { return "Attempting a dynamic property access on type '" + Luau::toString(e.ty) + "' is unsafe and may cause exceptions at runtime"; } + + std::string operator()(const UninhabitedTypeFamily& e) const + { + return "Type family instance " + Luau::toString(e.ty) + " is uninhabited"; + } + + std::string operator()(const UninhabitedTypePackFamily& e) const + { + return "Type pack family instance " + Luau::toString(e.tp) + " is uninhabited"; + } }; struct InvalidNameChecker @@ -786,6 +796,16 @@ bool DynamicPropertyLookupOnClassesUnsafe::operator==(const DynamicPropertyLooku return ty == rhs.ty; } +bool UninhabitedTypeFamily::operator==(const UninhabitedTypeFamily& rhs) const +{ + return ty == rhs.ty; +} + +bool UninhabitedTypePackFamily::operator==(const UninhabitedTypePackFamily& rhs) const +{ + return tp == rhs.tp; +} + std::string toString(const TypeError& error) { return toString(error, TypeErrorToStringOptions{}); @@ -944,6 +964,10 @@ void copyError(T& e, TypeArena& destArena, CloneState cloneState) } else if constexpr (std::is_same_v) e.ty = clone(e.ty); + else if constexpr (std::is_same_v) + e.ty = clone(e.ty); + else if constexpr (std::is_same_v) + e.tp = clone(e.tp); else static_assert(always_false_v, "Non-exhaustive type switch"); } diff --git a/Analysis/src/IostreamHelpers.cpp b/Analysis/src/IostreamHelpers.cpp index 43580da..000bb14 100644 --- a/Analysis/src/IostreamHelpers.cpp +++ b/Analysis/src/IostreamHelpers.cpp @@ -192,6 +192,10 @@ static void errorToString(std::ostream& stream, const T& err) stream << "TypePackMismatch { wanted = '" + toString(err.wantedTp) + "', given = '" + toString(err.givenTp) + "' }"; else if constexpr (std::is_same_v) stream << "DynamicPropertyLookupOnClassesUnsafe { " << toString(err.ty) << " }"; + else if constexpr (std::is_same_v) + stream << "UninhabitedTypeFamily { " << toString(err.ty) << " }"; + else if constexpr (std::is_same_v) + stream << "UninhabitedTypePackFamily { " << toString(err.tp) << " }"; else static_assert(always_false_v, "Non-exhaustive type switch"); } diff --git a/Analysis/src/Quantify.cpp b/Analysis/src/Quantify.cpp index 
0a7975f..5a7a050 100644 --- a/Analysis/src/Quantify.cpp +++ b/Analysis/src/Quantify.cpp @@ -154,8 +154,8 @@ void quantify(TypeId ty, TypeLevel level) struct PureQuantifier : Substitution { Scope* scope; - std::vector insertedGenerics; - std::vector insertedGenericPacks; + OrderedMap insertedGenerics; + OrderedMap insertedGenericPacks; bool seenMutableType = false; bool seenGenericType = false; @@ -203,7 +203,7 @@ struct PureQuantifier : Substitution if (auto ftv = get(ty)) { TypeId result = arena->addType(GenericType{scope}); - insertedGenerics.push_back(result); + insertedGenerics.push(ty, result); return result; } else if (auto ttv = get(ty)) @@ -217,7 +217,10 @@ struct PureQuantifier : Substitution resultTable->scope = scope; if (ttv->state == TableState::Free) + { resultTable->state = TableState::Generic; + insertedGenerics.push(ty, result); + } else if (ttv->state == TableState::Unsealed) resultTable->state = TableState::Sealed; @@ -231,8 +234,8 @@ struct PureQuantifier : Substitution { if (auto ftp = get(tp)) { - TypePackId result = arena->addTypePack(TypePackVar{GenericTypePack{}}); - insertedGenericPacks.push_back(result); + TypePackId result = arena->addTypePack(TypePackVar{GenericTypePack{scope}}); + insertedGenericPacks.push(tp, result); return result; } @@ -252,7 +255,7 @@ struct PureQuantifier : Substitution } }; -std::optional quantify(TypeArena* arena, TypeId ty, Scope* scope) +std::optional quantify(TypeArena* arena, TypeId ty, Scope* scope) { PureQuantifier quantifier{arena, scope}; std::optional result = quantifier.substitute(ty); @@ -262,11 +265,20 @@ std::optional quantify(TypeArena* arena, TypeId ty, Scope* scope) FunctionType* ftv = getMutable(*result); LUAU_ASSERT(ftv); ftv->scope = scope; - ftv->generics.insert(ftv->generics.end(), quantifier.insertedGenerics.begin(), quantifier.insertedGenerics.end()); - ftv->genericPacks.insert(ftv->genericPacks.end(), quantifier.insertedGenericPacks.begin(), quantifier.insertedGenericPacks.end()); + + for (auto k : quantifier.insertedGenerics.keys) + { + TypeId g = quantifier.insertedGenerics.pairings[k]; + if (get(g)) + ftv->generics.push_back(g); + } + + for (auto k : quantifier.insertedGenericPacks.keys) + ftv->genericPacks.push_back(quantifier.insertedGenericPacks.pairings[k]); + ftv->hasNoGenerics = ftv->generics.empty() && ftv->genericPacks.empty() && !quantifier.seenGenericType && !quantifier.seenMutableType; - return *result; + return std::optional({*result, std::move(quantifier.insertedGenerics), std::move(quantifier.insertedGenericPacks)}); } } // namespace Luau diff --git a/Analysis/src/Substitution.cpp b/Analysis/src/Substitution.cpp index 6a600b6..40a4959 100644 --- a/Analysis/src/Substitution.cpp +++ b/Analysis/src/Substitution.cpp @@ -78,6 +78,11 @@ static TypeId DEPRECATED_shallowClone(TypeId ty, TypeArena& dest, const TxnLog* { result = dest.addType(NegationType{ntv->ty}); } + else if (const TypeFamilyInstanceType* tfit = get(ty)) + { + TypeFamilyInstanceType clone{tfit->family, tfit->typeArguments, tfit->packArguments}; + result = dest.addType(std::move(clone)); + } else return result; @@ -168,14 +173,27 @@ static TypeId shallowClone(TypeId ty, TypeArena& dest, const TxnLog* log, bool a { if (alwaysClone) { - ClassType clone{a.name, a.props, a.parent, a.metatable, a.tags, a.userData, a.definitionModuleName}; - return dest.addType(std::move(clone)); + if (FFlag::LuauTypecheckClassTypeIndexers) + { + ClassType clone{a.name, a.props, a.parent, a.metatable, a.tags, a.userData, a.definitionModuleName, a.indexer}; + 
return dest.addType(std::move(clone)); + } + else + { + ClassType clone{a.name, a.props, a.parent, a.metatable, a.tags, a.userData, a.definitionModuleName}; + return dest.addType(std::move(clone)); + } } else return ty; } else if constexpr (std::is_same_v) return dest.addType(NegationType{a.ty}); + else if constexpr (std::is_same_v) + { + TypeFamilyInstanceType clone{a.family, a.typeArguments, a.packArguments}; + return dest.addType(std::move(clone)); + } else static_assert(always_false_v, "Non-exhaustive shallowClone switch"); }; @@ -255,6 +273,14 @@ void Tarjan::visitChildren(TypeId ty, int index) for (TypePackId a : petv->packArguments) visitChild(a); } + else if (const TypeFamilyInstanceType* tfit = get(ty)) + { + for (TypeId a : tfit->typeArguments) + visitChild(a); + + for (TypePackId a : tfit->packArguments) + visitChild(a); + } else if (const ClassType* ctv = get(ty); FFlag::LuauClassTypeVarsInSubstitution && ctv) { for (const auto& [name, prop] : ctv->props) @@ -265,6 +291,15 @@ void Tarjan::visitChildren(TypeId ty, int index) if (ctv->metatable) visitChild(*ctv->metatable); + + if (FFlag::LuauTypecheckClassTypeIndexers) + { + if (ctv->indexer) + { + visitChild(ctv->indexer->indexType); + visitChild(ctv->indexer->indexResultType); + } + } } else if (const NegationType* ntv = get(ty)) { @@ -669,6 +704,14 @@ TypePackId Substitution::clone(TypePackId tp) clone.hidden = vtp->hidden; return addTypePack(std::move(clone)); } + else if (const TypeFamilyInstanceTypePack* tfitp = get(tp)) + { + TypeFamilyInstanceTypePack clone{ + tfitp->family, std::vector(tfitp->typeArguments.size()), std::vector(tfitp->packArguments.size())}; + clone.typeArguments.assign(tfitp->typeArguments.begin(), tfitp->typeArguments.end()); + clone.packArguments.assign(tfitp->packArguments.begin(), tfitp->packArguments.end()); + return addTypePack(std::move(clone)); + } else if (FFlag::LuauClonePublicInterfaceLess2) { return addTypePack(*tp); @@ -786,6 +829,14 @@ void Substitution::replaceChildren(TypeId ty) for (TypePackId& a : petv->packArguments) a = replace(a); } + else if (TypeFamilyInstanceType* tfit = getMutable(ty)) + { + for (TypeId& a : tfit->typeArguments) + a = replace(a); + + for (TypePackId& a : tfit->packArguments) + a = replace(a); + } else if (ClassType* ctv = getMutable(ty); FFlag::LuauClassTypeVarsInSubstitution && ctv) { for (auto& [name, prop] : ctv->props) @@ -796,6 +847,15 @@ void Substitution::replaceChildren(TypeId ty) if (ctv->metatable) ctv->metatable = replace(*ctv->metatable); + + if (FFlag::LuauTypecheckClassTypeIndexers) + { + if (ctv->indexer) + { + ctv->indexer->indexType = replace(ctv->indexer->indexType); + ctv->indexer->indexResultType = replace(ctv->indexer->indexResultType); + } + } } else if (NegationType* ntv = getMutable(ty)) { @@ -824,6 +884,14 @@ void Substitution::replaceChildren(TypePackId tp) { vtp->ty = replace(vtp->ty); } + else if (TypeFamilyInstanceTypePack* tfitp = getMutable(tp)) + { + for (TypeId& t : tfitp->typeArguments) + t = replace(t); + + for (TypePackId& t : tfitp->packArguments) + t = replace(t); + } } } // namespace Luau diff --git a/Analysis/src/ToDot.cpp b/Analysis/src/ToDot.cpp index 8d889cb..f2f15e8 100644 --- a/Analysis/src/ToDot.cpp +++ b/Analysis/src/ToDot.cpp @@ -257,6 +257,15 @@ void StateDot::visitChildren(TypeId ty, int index) if (ctv->metatable) visitChild(*ctv->metatable, index, "[metatable]"); + + if (FFlag::LuauTypecheckClassTypeIndexers) + { + if (ctv->indexer) + { + visitChild(ctv->indexer->indexType, index, "[index]"); + 
visitChild(ctv->indexer->indexResultType, index, "[value]"); + } + } } else if (const SingletonType* stv = get(ty)) { diff --git a/Analysis/src/ToString.cpp b/Analysis/src/ToString.cpp index ea3ab57..f5b908e 100644 --- a/Analysis/src/ToString.cpp +++ b/Analysis/src/ToString.cpp @@ -8,6 +8,7 @@ #include "Luau/TypeInfer.h" #include "Luau/TypePack.h" #include "Luau/Type.h" +#include "Luau/TypeFamily.h" #include "Luau/VisitType.h" #include @@ -16,11 +17,22 @@ LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution) /* - * Prefix generic typenames with gen- - * Additionally, free types will be prefixed with free- and suffixed with their level. eg free-a-4 - * Fair warning: Setting this will break a lot of Luau unit tests. + * Enables increasing levels of verbosity for Luau type names when stringifying. + * After level 2, test cases will break unpredictably because a pointer to their + * scope will be included in the stringification of generic and free types. + * + * Supported values: + * + * 0: Disabled, no changes. + * + * 1: Prefix free/generic types with free- and gen-, respectively. Also reveal + * hidden variadic tails. + * + * 2: Suffix free/generic types with their scope depth. + * + * 3: Suffix free/generic types with their scope pointer, if present. */ -LUAU_FASTFLAGVARIABLE(DebugLuauVerboseTypeNames, false) +LUAU_FASTINTVARIABLE(DebugLuauVerboseTypeNames, 0) LUAU_FASTFLAGVARIABLE(DebugLuauToStringNoLexicalSort, false) namespace Luau @@ -223,11 +235,15 @@ struct StringifierState ++count; emit(count); - emit("-"); - char buffer[16]; - uint32_t s = uint32_t(intptr_t(scope) & 0xFFFFFF); - snprintf(buffer, sizeof(buffer), "0x%x", s); - emit(buffer); + + if (FInt::DebugLuauVerboseTypeNames >= 3) + { + emit("-"); + char buffer[16]; + uint32_t s = uint32_t(intptr_t(scope) & 0xFFFFFF); + snprintf(buffer, sizeof(buffer), "0x%x", s); + emit(buffer); + } } void emit(TypeLevel level) @@ -371,11 +387,13 @@ struct TypeStringifier void operator()(TypeId ty, const FreeType& ftv) { state.result.invalid = true; - if (FFlag::DebugLuauVerboseTypeNames) + + if (FInt::DebugLuauVerboseTypeNames >= 1) state.emit("free-"); + state.emit(state.getName(ty)); - if (FFlag::DebugLuauVerboseTypeNames) + if (FInt::DebugLuauVerboseTypeNames >= 2) { state.emit("-"); if (FFlag::DebugLuauDeferredConstraintResolution) @@ -392,6 +410,9 @@ struct TypeStringifier void operator()(TypeId ty, const GenericType& gtv) { + if (FInt::DebugLuauVerboseTypeNames >= 1) + state.emit("gen-"); + if (gtv.explicitName) { state.usedNames.insert(gtv.name); @@ -401,7 +422,7 @@ struct TypeStringifier else state.emit(state.getName(ty)); - if (FFlag::DebugLuauVerboseTypeNames) + if (FInt::DebugLuauVerboseTypeNames >= 2) { state.emit("-"); if (FFlag::DebugLuauDeferredConstraintResolution) @@ -871,6 +892,33 @@ struct TypeStringifier if (parens) state.emit(")"); } + + void operator()(TypeId, const TypeFamilyInstanceType& tfitv) + { + state.emit(tfitv.family->name); + state.emit("<"); + + bool comma = false; + for (TypeId ty : tfitv.typeArguments) + { + if (comma) + state.emit(", "); + + comma = true; + stringify(ty); + } + + for (TypePackId tp : tfitv.packArguments) + { + if (comma) + state.emit(", "); + + comma = true; + stringify(tp); + } + + state.emit(">"); + } }; struct TypePackStringifier @@ -958,7 +1006,7 @@ struct TypePackStringifier if (tp.tail && !isEmpty(*tp.tail)) { TypePackId tail = follow(*tp.tail); - if (auto vtp = get(tail); !vtp || (!FFlag::DebugLuauVerboseTypeNames && !vtp->hidden)) + if (auto vtp = get(tail); !vtp || 
(FInt::DebugLuauVerboseTypeNames < 1 && !vtp->hidden)) { if (first) first = false; @@ -981,7 +1029,7 @@ struct TypePackStringifier void operator()(TypePackId, const VariadicTypePack& pack) { state.emit("..."); - if (FFlag::DebugLuauVerboseTypeNames && pack.hidden) + if (FInt::DebugLuauVerboseTypeNames >= 1 && pack.hidden) { state.emit("*hidden*"); } @@ -990,6 +1038,9 @@ struct TypePackStringifier void operator()(TypePackId tp, const GenericTypePack& pack) { + if (FInt::DebugLuauVerboseTypeNames >= 1) + state.emit("gen-"); + if (pack.explicitName) { state.usedNames.insert(pack.name); @@ -1001,7 +1052,7 @@ struct TypePackStringifier state.emit(state.getName(tp)); } - if (FFlag::DebugLuauVerboseTypeNames) + if (FInt::DebugLuauVerboseTypeNames >= 2) { state.emit("-"); if (FFlag::DebugLuauDeferredConstraintResolution) @@ -1009,17 +1060,18 @@ struct TypePackStringifier else state.emit(pack.level); } + state.emit("..."); } void operator()(TypePackId tp, const FreeTypePack& pack) { state.result.invalid = true; - if (FFlag::DebugLuauVerboseTypeNames) + if (FInt::DebugLuauVerboseTypeNames >= 1) state.emit("free-"); state.emit(state.getName(tp)); - if (FFlag::DebugLuauVerboseTypeNames) + if (FInt::DebugLuauVerboseTypeNames >= 2) { state.emit("-"); if (FFlag::DebugLuauDeferredConstraintResolution) @@ -1042,6 +1094,33 @@ struct TypePackStringifier state.emit(btp.index); state.emit("*"); } + + void operator()(TypePackId, const TypeFamilyInstanceTypePack& tfitp) + { + state.emit(tfitp.family->name); + state.emit("<"); + + bool comma = false; + for (TypeId p : tfitp.typeArguments) + { + if (comma) + state.emit(", "); + + comma = true; + stringify(p); + } + + for (TypePackId p : tfitp.packArguments) + { + if (comma) + state.emit(", "); + + comma = true; + stringify(p); + } + + state.emit(">"); + } }; void TypeStringifier::stringify(TypePackId tp) @@ -1560,6 +1639,12 @@ std::string toString(const Constraint& constraint, ToStringOptions& opts) } else if constexpr (std::is_same_v) return tos(c.resultPack) + " ~ unpack " + tos(c.sourcePack); + else if constexpr (std::is_same_v) + return "reduce " + tos(c.ty); + else if constexpr (std::is_same_v) + { + return "reduce " + tos(c.tp); + } else static_assert(always_false_v, "Non-exhaustive constraint switch"); }; diff --git a/Analysis/src/TxnLog.cpp b/Analysis/src/TxnLog.cpp index 33554ce..53dd3b4 100644 --- a/Analysis/src/TxnLog.cpp +++ b/Analysis/src/TxnLog.cpp @@ -1,6 +1,7 @@ // This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details #include "Luau/TxnLog.h" +#include "Luau/Scope.h" #include "Luau/ToString.h" #include "Luau/TypeArena.h" #include "Luau/TypePack.h" @@ -8,6 +9,8 @@ #include #include +LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution) + namespace Luau { @@ -71,7 +74,11 @@ const TxnLog* TxnLog::empty() void TxnLog::concat(TxnLog rhs) { for (auto& [ty, rep] : rhs.typeVarChanges) + { + if (rep->dead) + continue; typeVarChanges[ty] = std::move(rep); + } for (auto& [tp, rep] : rhs.typePackChanges) typePackChanges[tp] = std::move(rep); @@ -81,7 +88,10 @@ void TxnLog::concatAsIntersections(TxnLog rhs, NotNull arena) { for (auto& [ty, rightRep] : rhs.typeVarChanges) { - if (auto leftRep = typeVarChanges.find(ty)) + if (rightRep->dead) + continue; + + if (auto leftRep = typeVarChanges.find(ty); leftRep && !(*leftRep)->dead) { TypeId leftTy = arena->addType((*leftRep)->pending); TypeId rightTy = arena->addType(rightRep->pending); @@ -97,16 +107,94 @@ void TxnLog::concatAsIntersections(TxnLog rhs, 
NotNull arena) void TxnLog::concatAsUnion(TxnLog rhs, NotNull arena) { - for (auto& [ty, rightRep] : rhs.typeVarChanges) + if (FFlag::DebugLuauDeferredConstraintResolution) { - if (auto leftRep = typeVarChanges.find(ty)) + /* + * Check for cycles. + * + * We must not combine a log entry that binds 'a to 'b with a log that + * binds 'b to 'a. + * + * Of the two, identify the one with the 'bigger' scope and eliminate the + * entry that rebinds it. + */ + for (const auto& [rightTy, rightRep] : rhs.typeVarChanges) { - TypeId leftTy = arena->addType((*leftRep)->pending); - TypeId rightTy = arena->addType(rightRep->pending); - typeVarChanges[ty]->pending.ty = UnionType{{leftTy, rightTy}}; + if (rightRep->dead) + continue; + + // We explicitly use get_if here because we do not wish to do anything + // if the uncommitted type is already bound to something else. + const FreeType* rf = get_if(&rightTy->ty); + if (!rf) + continue; + + const BoundType* rb = Luau::get(&rightRep->pending); + if (!rb) + continue; + + const TypeId leftTy = rb->boundTo; + const FreeType* lf = get_if(&leftTy->ty); + if (!lf) + continue; + + auto leftRep = typeVarChanges.find(leftTy); + if (!leftRep) + continue; + + if ((*leftRep)->dead) + continue; + + const BoundType* lb = Luau::get(&(*leftRep)->pending); + if (!lb) + continue; + + if (lb->boundTo == rightTy) + { + // leftTy has been bound to rightTy, but rightTy has also been bound + // to leftTy. We find the one that belongs to the more deeply nested + // scope and remove it from the log. + const bool discardLeft = useScopes ? subsumes(lf->scope, rf->scope) : lf->level.subsumes(rf->level); + + if (discardLeft) + (*leftRep)->dead = true; + else + rightRep->dead = true; + } + } + + for (auto& [ty, rightRep] : rhs.typeVarChanges) + { + if (rightRep->dead) + continue; + + if (auto leftRep = typeVarChanges.find(ty); leftRep && !(*leftRep)->dead) + { + TypeId leftTy = arena->addType((*leftRep)->pending); + TypeId rightTy = arena->addType(rightRep->pending); + + if (follow(leftTy) == follow(rightTy)) + typeVarChanges[ty] = std::move(rightRep); + else + typeVarChanges[ty]->pending.ty = UnionType{{leftTy, rightTy}}; + } + else + typeVarChanges[ty] = std::move(rightRep); + } + } + else + { + for (auto& [ty, rightRep] : rhs.typeVarChanges) + { + if (auto leftRep = typeVarChanges.find(ty)) + { + TypeId leftTy = arena->addType((*leftRep)->pending); + TypeId rightTy = arena->addType(rightRep->pending); + typeVarChanges[ty]->pending.ty = UnionType{{leftTy, rightTy}}; + } + else + typeVarChanges[ty] = std::move(rightRep); } - else - typeVarChanges[ty] = std::move(rightRep); } for (auto& [tp, rep] : rhs.typePackChanges) @@ -116,7 +204,10 @@ void TxnLog::concatAsUnion(TxnLog rhs, NotNull arena) void TxnLog::commit() { for (auto& [ty, rep] : typeVarChanges) - asMutable(ty)->reassign(rep.get()->pending); + { + if (!rep->dead) + asMutable(ty)->reassign(rep.get()->pending); + } for (auto& [tp, rep] : typePackChanges) asMutable(tp)->reassign(rep.get()->pending); @@ -135,7 +226,10 @@ TxnLog TxnLog::inverse() TxnLog inversed(sharedSeen); for (auto& [ty, _rep] : typeVarChanges) - inversed.typeVarChanges[ty] = std::make_unique(*ty); + { + if (!_rep->dead) + inversed.typeVarChanges[ty] = std::make_unique(*ty); + } for (auto& [tp, _rep] : typePackChanges) inversed.typePackChanges[tp] = std::make_unique(*tp); @@ -204,7 +298,7 @@ PendingType* TxnLog::queue(TypeId ty) // Explicitly don't look in ancestors. 
If we have discovered something new // about this type, we don't want to mutate the parent's state. auto& pending = typeVarChanges[ty]; - if (!pending) + if (!pending || (*pending).dead) { pending = std::make_unique(*ty); pending->pending.owningArena = nullptr; @@ -237,7 +331,7 @@ PendingType* TxnLog::pending(TypeId ty) const for (const TxnLog* current = this; current; current = current->parent) { - if (auto it = current->typeVarChanges.find(ty)) + if (auto it = current->typeVarChanges.find(ty); it && !(*it)->dead) return it->get(); } diff --git a/Analysis/src/TypeAttach.cpp b/Analysis/src/TypeAttach.cpp index 86f7816..dba9547 100644 --- a/Analysis/src/TypeAttach.cpp +++ b/Analysis/src/TypeAttach.cpp @@ -9,6 +9,7 @@ #include "Luau/TypeInfer.h" #include "Luau/TypePack.h" #include "Luau/Type.h" +#include "Luau/TypeFamily.h" #include @@ -362,6 +363,10 @@ public: // FIXME: do the same thing we do with ErrorType throw InternalCompilerError("Cannot convert NegationType into AstNode"); } + AstType* operator()(const TypeFamilyInstanceType& tfit) + { + return allocator->alloc(Location(), std::nullopt, AstName{tfit.family->name.c_str()}, std::nullopt, Location()); + } private: Allocator* allocator; @@ -432,6 +437,11 @@ public: return allocator->alloc(Location(), AstName("Unifiable")); } + AstTypePack* operator()(const TypeFamilyInstanceTypePack& tfitp) const + { + return allocator->alloc(Location(), AstName(tfitp.family->name.c_str())); + } + private: Allocator* allocator; SyntheticNames* syntheticNames; diff --git a/Analysis/src/TypeChecker2.cpp b/Analysis/src/TypeChecker2.cpp index 2a2fe69..a1f764a 100644 --- a/Analysis/src/TypeChecker2.cpp +++ b/Analysis/src/TypeChecker2.cpp @@ -16,6 +16,7 @@ #include "Luau/TypeReduction.h" #include "Luau/TypeUtils.h" #include "Luau/Unifier.h" +#include "Luau/TypeFamily.h" #include @@ -113,6 +114,13 @@ struct TypeChecker2 return std::nullopt; } + TypeId checkForFamilyInhabitance(TypeId instance, Location location) + { + TxnLog fake{}; + reportErrors(reduceFamilies(instance, location, NotNull{&testArena}, builtinTypes, &fake, true).errors); + return instance; + } + TypePackId lookupPack(AstExpr* expr) { // If a type isn't in the type graph, it probably means that a recursion limit was exceeded. @@ -132,11 +140,11 @@ struct TypeChecker2 // allows us not to think about this very much in the actual typechecking logic. 
TypeId* ty = module->astTypes.find(expr); if (ty) - return follow(*ty); + return checkForFamilyInhabitance(follow(*ty), expr->location); TypePackId* tp = module->astTypePacks.find(expr); if (tp) - return flattenPack(*tp); + return checkForFamilyInhabitance(flattenPack(*tp), expr->location); return builtinTypes->anyType; } @@ -159,7 +167,7 @@ struct TypeChecker2 TypeId* ty = module->astResolvedTypes.find(annotation); LUAU_ASSERT(ty); - return follow(*ty); + return checkForFamilyInhabitance(follow(*ty), annotation->location); } TypePackId lookupPackAnnotation(AstTypePack* annotation) @@ -311,6 +319,7 @@ struct TypeChecker2 TypePackId actualRetType = reconstructPack(ret->list, *arena); Unifier u{NotNull{&normalizer}, Mode::Strict, stack.back(), ret->location, Covariant}; + u.hideousFixMeGenericsAreActuallyFree = true; u.tryUnify(actualRetType, expectedRetType); const bool ok = u.errors.empty() && u.log.empty(); @@ -989,8 +998,11 @@ struct TypeChecker2 return; } + TxnLog fake{}; + LUAU_ASSERT(ftv); - reportErrors(tryUnify(stack.back(), call->location, ftv->retTypes, expectedRetType, CountMismatch::Context::Return)); + reportErrors(tryUnify(stack.back(), call->location, ftv->retTypes, expectedRetType, CountMismatch::Context::Return, /* genericsOkay */ true)); + reportErrors(reduceFamilies(ftv->retTypes, call->location, NotNull{&testArena}, builtinTypes, &fake, true).errors); auto it = begin(expectedArgTypes); size_t i = 0; @@ -1007,7 +1019,8 @@ struct TypeChecker2 Location argLoc = argLocs.at(i >= argLocs.size() ? argLocs.size() - 1 : i); - reportErrors(tryUnify(stack.back(), argLoc, expectedArg, arg)); + reportErrors(tryUnify(stack.back(), argLoc, expectedArg, arg, CountMismatch::Context::Arg, /* genericsOkay */ true)); + reportErrors(reduceFamilies(arg, argLoc, NotNull{&testArena}, builtinTypes, &fake, true).errors); ++it; ++i; @@ -1018,7 +1031,8 @@ struct TypeChecker2 if (auto tail = it.tail()) { TypePackId remainingArgs = testArena.addTypePack(TypePack{std::move(slice), std::nullopt}); - reportErrors(tryUnify(stack.back(), argLocs.back(), *tail, remainingArgs)); + reportErrors(tryUnify(stack.back(), argLocs.back(), *tail, remainingArgs, CountMismatch::Context::Arg, /* genericsOkay */ true)); + reportErrors(reduceFamilies(remainingArgs, argLocs.back(), NotNull{&testArena}, builtinTypes, &fake, true).errors); } } @@ -1344,7 +1358,7 @@ struct TypeChecker2 else if (get(rightType) || get(rightType)) return rightType; - if ((get(leftType) || get(leftType)) && !isEquality && !isLogical) + if ((get(leftType) || get(leftType) || get(leftType)) && !isEquality && !isLogical) { auto name = getIdentifierOfBaseVar(expr->left); reportError(CannotInferBinaryOperation{expr->op, name, @@ -1591,10 +1605,10 @@ struct TypeChecker2 TypeId computedType = lookupType(expr->expr); // Note: As an optimization, we try 'number <: number | string' first, as that is the more likely case. 
- if (isSubtype(annotationType, computedType, stack.back())) + if (isSubtype(annotationType, computedType, stack.back(), true)) return; - if (isSubtype(computedType, annotationType, stack.back())) + if (isSubtype(computedType, annotationType, stack.back(), true)) return; reportError(TypesAreUnrelated{computedType, annotationType}, expr->location); @@ -1679,6 +1693,10 @@ struct TypeChecker2 void visit(AstType* ty) { + TypeId* resolvedTy = module->astResolvedTypes.find(ty); + if (resolvedTy) + checkForFamilyInhabitance(follow(*resolvedTy), ty->location); + if (auto t = ty->as()) return visit(t); else if (auto t = ty->as()) @@ -1989,11 +2007,12 @@ struct TypeChecker2 } template - bool isSubtype(TID subTy, TID superTy, NotNull scope) + bool isSubtype(TID subTy, TID superTy, NotNull scope, bool genericsOkay = false) { TypeArena arena; Unifier u{NotNull{&normalizer}, Mode::Strict, scope, Location{}, Covariant}; - u.useScopes = true; + u.hideousFixMeGenericsAreActuallyFree = genericsOkay; + u.enableScopeTests(); u.tryUnify(subTy, superTy); const bool ok = u.errors.empty() && u.log.empty(); @@ -2001,11 +2020,13 @@ struct TypeChecker2 } template - ErrorVec tryUnify(NotNull scope, const Location& location, TID subTy, TID superTy, CountMismatch::Context context = CountMismatch::Arg) + ErrorVec tryUnify(NotNull scope, const Location& location, TID subTy, TID superTy, CountMismatch::Context context = CountMismatch::Arg, + bool genericsOkay = false) { Unifier u{NotNull{&normalizer}, Mode::Strict, scope, location, Covariant}; u.ctx = context; - u.useScopes = true; + u.hideousFixMeGenericsAreActuallyFree = genericsOkay; + u.enableScopeTests(); u.tryUnify(subTy, superTy); return std::move(u.errors); diff --git a/Analysis/src/TypeFamily.cpp b/Analysis/src/TypeFamily.cpp new file mode 100644 index 0000000..1941573 --- /dev/null +++ b/Analysis/src/TypeFamily.cpp @@ -0,0 +1,310 @@ +// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details + +#include "Luau/TypeFamily.h" + +#include "Luau/DenseHash.h" +#include "Luau/VisitType.h" +#include "Luau/TxnLog.h" +#include "Luau/Substitution.h" +#include "Luau/ToString.h" + +LUAU_DYNAMIC_FASTINTVARIABLE(LuauTypeFamilyGraphReductionMaximumSteps, 1'000'000); + +namespace Luau +{ + +struct InstanceCollector : TypeOnceVisitor +{ + std::deque tys; + std::deque tps; + + bool visit(TypeId ty, const TypeFamilyInstanceType&) override + { + // TypeOnceVisitor performs a depth-first traversal in the absence of + // cycles. This means that by pushing to the front of the queue, we will + // try to reduce deeper instances first if we start with the first thing + // in the queue. Consider Add, number>, number>: + // we want to reduce the innermost Add instantiation + // first. + tys.push_front(ty); + return true; + } + + bool visit(TypePackId tp, const TypeFamilyInstanceTypePack&) override + { + // TypeOnceVisitor performs a depth-first traversal in the absence of + // cycles. This means that by pushing to the front of the queue, we will + // try to reduce deeper instances first if we start with the first thing + // in the queue. Consider Add, number>, number>: + // we want to reduce the innermost Add instantiation + // first. 
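// --- Editorial sketch, not part of the patch ---------------------------------
// The ordering trick described in the comments above, in isolation: a
// depth-first visitor reaches the outermost family instance first and the
// innermost one last, so pushing each instance onto the *front* of the deque
// and later popping from the front makes the reducer handle the innermost
// instance (for example the innermost Add in a nested chain of Add instances)
// before the instances that contain it.
#include <deque>
#include <string>
#include <vector>

static std::vector<std::string> reductionOrder()
{
    std::deque<std::string> queue;
    for (const char* visited : {"outermost", "middle", "innermost"}) // depth-first visit order
        queue.push_front(visited);

    std::vector<std::string> order;
    while (!queue.empty())
    {
        order.push_back(queue.front()); // yields "innermost", "middle", "outermost"
        queue.pop_front();
    }
    return order;
}
// -----------------------------------------------------------------------------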
+ tps.push_front(tp); + return true; + } +}; + +struct FamilyReducer +{ + std::deque queuedTys; + std::deque queuedTps; + DenseHashSet irreducible{nullptr}; + FamilyGraphReductionResult result; + Location location; + NotNull arena; + NotNull builtins; + TxnLog* log = nullptr; + NotNull reducerLog; + bool force = false; + + FamilyReducer(std::deque queuedTys, std::deque queuedTps, Location location, NotNull arena, + NotNull builtins, TxnLog* log = nullptr, bool force = false) + : queuedTys(std::move(queuedTys)) + , queuedTps(std::move(queuedTps)) + , location(location) + , arena(arena) + , builtins(builtins) + , log(log) + , reducerLog(NotNull{log ? log : TxnLog::empty()}) + , force(force) + { + } + + enum class SkipTestResult + { + Irreducible, + Defer, + Okay, + }; + + SkipTestResult testForSkippability(TypeId ty) + { + ty = reducerLog->follow(ty); + + if (reducerLog->is(ty)) + { + if (!irreducible.contains(ty)) + return SkipTestResult::Defer; + else + return SkipTestResult::Irreducible; + } + else if (reducerLog->is(ty)) + { + return SkipTestResult::Irreducible; + } + + return SkipTestResult::Okay; + } + + SkipTestResult testForSkippability(TypePackId ty) + { + ty = reducerLog->follow(ty); + + if (reducerLog->is(ty)) + { + if (!irreducible.contains(ty)) + return SkipTestResult::Defer; + else + return SkipTestResult::Irreducible; + } + else if (reducerLog->is(ty)) + { + return SkipTestResult::Irreducible; + } + + return SkipTestResult::Okay; + } + + template + void replace(T subject, T replacement) + { + if (log) + log->replace(subject, Unifiable::Bound{replacement}); + else + asMutable(subject)->ty.template emplace>(replacement); + + if constexpr (std::is_same_v) + result.reducedTypes.insert(subject); + else if constexpr (std::is_same_v) + result.reducedPacks.insert(subject); + } + + template + void handleFamilyReduction(T subject, TypeFamilyReductionResult reduction) + { + if (reduction.result) + replace(subject, *reduction.result); + else + { + irreducible.insert(subject); + + if (reduction.uninhabited || force) + { + if constexpr (std::is_same_v) + result.errors.push_back(TypeError{location, UninhabitedTypeFamily{subject}}); + else if constexpr (std::is_same_v) + result.errors.push_back(TypeError{location, UninhabitedTypePackFamily{subject}}); + } + else if (!reduction.uninhabited && !force) + { + for (TypeId b : reduction.blockedTypes) + result.blockedTypes.insert(b); + + for (TypePackId b : reduction.blockedPacks) + result.blockedPacks.insert(b); + } + } + } + + bool done() + { + return queuedTys.empty() && queuedTps.empty(); + } + + template + bool testParameters(T subject, const I* tfit) + { + for (TypeId p : tfit->typeArguments) + { + SkipTestResult skip = testForSkippability(p); + + if (skip == SkipTestResult::Irreducible) + { + irreducible.insert(subject); + return false; + } + else if (skip == SkipTestResult::Defer) + { + if constexpr (std::is_same_v) + queuedTys.push_back(subject); + else if constexpr (std::is_same_v) + queuedTps.push_back(subject); + + return false; + } + } + + for (TypePackId p : tfit->packArguments) + { + SkipTestResult skip = testForSkippability(p); + + if (skip == SkipTestResult::Irreducible) + { + irreducible.insert(subject); + return false; + } + else if (skip == SkipTestResult::Defer) + { + if constexpr (std::is_same_v) + queuedTys.push_back(subject); + else if constexpr (std::is_same_v) + queuedTps.push_back(subject); + + return false; + } + } + + return true; + } + + void stepType() + { + TypeId subject = reducerLog->follow(queuedTys.front()); + 
queuedTys.pop_front(); + + if (irreducible.contains(subject)) + return; + + if (const TypeFamilyInstanceType* tfit = reducerLog->get(subject)) + { + if (!testParameters(subject, tfit)) + return; + + TypeFamilyReductionResult result = tfit->family->reducer(tfit->typeArguments, tfit->packArguments, arena, builtins, reducerLog); + handleFamilyReduction(subject, result); + } + } + + void stepPack() + { + TypePackId subject = reducerLog->follow(queuedTps.front()); + queuedTps.pop_front(); + + if (irreducible.contains(subject)) + return; + + if (const TypeFamilyInstanceTypePack* tfit = reducerLog->get(subject)) + { + if (!testParameters(subject, tfit)) + return; + + TypeFamilyReductionResult result = + tfit->family->reducer(tfit->typeArguments, tfit->packArguments, arena, builtins, reducerLog); + handleFamilyReduction(subject, result); + } + } + + void step() + { + if (!queuedTys.empty()) + stepType(); + else if (!queuedTps.empty()) + stepPack(); + } +}; + +static FamilyGraphReductionResult reduceFamiliesInternal(std::deque queuedTys, std::deque queuedTps, Location location, + NotNull arena, NotNull builtins, TxnLog* log, bool force) +{ + FamilyReducer reducer{std::move(queuedTys), std::move(queuedTps), location, arena, builtins, log, force}; + int iterationCount = 0; + + while (!reducer.done()) + { + reducer.step(); + + ++iterationCount; + if (iterationCount > DFInt::LuauTypeFamilyGraphReductionMaximumSteps) + { + reducer.result.errors.push_back(TypeError{location, CodeTooComplex{}}); + break; + } + } + + return std::move(reducer.result); +} + +FamilyGraphReductionResult reduceFamilies( + TypeId entrypoint, Location location, NotNull arena, NotNull builtins, TxnLog* log, bool force) +{ + InstanceCollector collector; + + try + { + collector.traverse(entrypoint); + } + catch (RecursionLimitException&) + { + return FamilyGraphReductionResult{}; + } + + return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, arena, builtins, log, force); +} + +FamilyGraphReductionResult reduceFamilies( + TypePackId entrypoint, Location location, NotNull arena, NotNull builtins, TxnLog* log, bool force) +{ + InstanceCollector collector; + + try + { + collector.traverse(entrypoint); + } + catch (RecursionLimitException&) + { + return FamilyGraphReductionResult{}; + } + + return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, arena, builtins, log, force); +} + +} // namespace Luau diff --git a/Analysis/src/TypeInfer.cpp b/Analysis/src/TypeInfer.cpp index 1ccba91..94c64ee 100644 --- a/Analysis/src/TypeInfer.cpp +++ b/Analysis/src/TypeInfer.cpp @@ -41,6 +41,7 @@ LUAU_FASTFLAG(LuauOccursIsntAlwaysFailure) LUAU_FASTFLAGVARIABLE(LuauTypecheckTypeguards, false) LUAU_FASTFLAGVARIABLE(LuauTinyControlFlowAnalysis, false) LUAU_FASTFLAG(LuauRequirePathTrueModuleName) +LUAU_FASTFLAGVARIABLE(LuauTypecheckClassTypeIndexers, false) namespace Luau { @@ -2104,6 +2105,23 @@ std::optional TypeChecker::getIndexTypeFromTypeImpl( const Property* prop = lookupClassProp(cls, name); if (prop) return prop->type(); + + if (FFlag::LuauTypecheckClassTypeIndexers) + { + if (auto indexer = cls->indexer) + { + // TODO: Property lookup should work with string singletons or unions thereof as the indexer key type. 
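// --- Editorial sketch, not part of the patch ---------------------------------
// What the indexer branch below does, in isolation: when a property is not
// found on a ClassType but the class declares an indexer, the property name is
// treated as a string key, the string type is unified with the indexer's key
// type, and on success the lookup produces the indexer's result type. A
// standalone approximation follows; the helper name is hypothetical and the
// unification step is reduced to a plain callback so the sketch stays
// self-contained.
#include <functional>
#include <optional>
#include "Luau/Type.h"

using namespace Luau;

static std::optional<TypeId> classIndexLookup(
    const ClassType* cls, TypeId stringType, const std::function<bool(TypeId, TypeId)>& unifies)
{
    if (!cls->indexer)
        return std::nullopt; // no indexer declared: callers fall back to an UnknownProperty error
    if (unifies(stringType, cls->indexer->indexType))
        return cls->indexer->indexResultType;
    return std::nullopt;
}
// -----------------------------------------------------------------------------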
+ ErrorVec errors = tryUnify(stringType, indexer->indexType, scope, location); + + if (errors.empty()) + return indexer->indexResultType; + + if (addErrors) + reportError(location, UnknownProperty{type, name}); + + return std::nullopt; + } + } } else if (const UnionType* utv = get(type)) { @@ -3295,14 +3313,38 @@ TypeId TypeChecker::checkLValueBinding(const ScopePtr& scope, const AstExprIndex } else if (const ClassType* lhsClass = get(lhs)) { - const Property* prop = lookupClassProp(lhsClass, name); - if (!prop) + if (FFlag::LuauTypecheckClassTypeIndexers) { + if (const Property* prop = lookupClassProp(lhsClass, name)) + { + return prop->type(); + } + + if (auto indexer = lhsClass->indexer) + { + Unifier state = mkUnifier(scope, expr.location); + state.tryUnify(stringType, indexer->indexType); + if (state.errors.empty()) + { + state.log.commit(); + return indexer->indexResultType; + } + } + reportError(TypeError{expr.location, UnknownProperty{lhs, name}}); return errorRecoveryType(scope); } + else + { + const Property* prop = lookupClassProp(lhsClass, name); + if (!prop) + { + reportError(TypeError{expr.location, UnknownProperty{lhs, name}}); + return errorRecoveryType(scope); + } - return prop->type(); + return prop->type(); + } } else if (get(lhs)) { @@ -3344,23 +3386,57 @@ TypeId TypeChecker::checkLValueBinding(const ScopePtr& scope, const AstExprIndex { if (const ClassType* exprClass = get(exprType)) { - const Property* prop = lookupClassProp(exprClass, value->value.data); - if (!prop) + if (FFlag::LuauTypecheckClassTypeIndexers) { + if (const Property* prop = lookupClassProp(exprClass, value->value.data)) + { + return prop->type(); + } + + if (auto indexer = exprClass->indexer) + { + unify(stringType, indexer->indexType, scope, expr.index->location); + return indexer->indexResultType; + } + reportError(TypeError{expr.location, UnknownProperty{exprType, value->value.data}}); return errorRecoveryType(scope); } - return prop->type(); + else + { + const Property* prop = lookupClassProp(exprClass, value->value.data); + if (!prop) + { + reportError(TypeError{expr.location, UnknownProperty{exprType, value->value.data}}); + return errorRecoveryType(scope); + } + return prop->type(); + } } } - else if (FFlag::LuauAllowIndexClassParameters) + else { - if (const ClassType* exprClass = get(exprType)) + if (FFlag::LuauTypecheckClassTypeIndexers) { - if (isNonstrictMode()) - return unknownType; - reportError(TypeError{expr.location, DynamicPropertyLookupOnClassesUnsafe{exprType}}); - return errorRecoveryType(scope); + if (const ClassType* exprClass = get(exprType)) + { + if (auto indexer = exprClass->indexer) + { + unify(indexType, indexer->indexType, scope, expr.index->location); + return indexer->indexResultType; + } + } + } + + if (FFlag::LuauAllowIndexClassParameters) + { + if (const ClassType* exprClass = get(exprType)) + { + if (isNonstrictMode()) + return unknownType; + reportError(TypeError{expr.location, DynamicPropertyLookupOnClassesUnsafe{exprType}}); + return errorRecoveryType(scope); + } } } diff --git a/Analysis/src/Unifier.cpp b/Analysis/src/Unifier.cpp index 6047a49..56be404 100644 --- a/Analysis/src/Unifier.cpp +++ b/Analysis/src/Unifier.cpp @@ -12,6 +12,7 @@ #include "Luau/TypeUtils.h" #include "Luau/Type.h" #include "Luau/VisitType.h" +#include "Luau/TypeFamily.h" #include @@ -20,6 +21,7 @@ LUAU_FASTFLAG(LuauErrorRecoveryType) LUAU_FASTFLAGVARIABLE(LuauInstantiateInSubtyping, false) LUAU_FASTFLAGVARIABLE(LuauUninhabitedSubAnything2, false) 
LUAU_FASTFLAGVARIABLE(LuauVariadicAnyCanBeGeneric, false) +LUAU_FASTFLAGVARIABLE(LuauUnifyTwoOptions, false) LUAU_FASTFLAGVARIABLE(LuauMaintainScopesInUnifier, false) LUAU_FASTFLAGVARIABLE(LuauTransitiveSubtyping, false) LUAU_FASTFLAGVARIABLE(LuauOccursIsntAlwaysFailure, false) @@ -439,6 +441,30 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool if (superTy == subTy) return; + if (log.get(superTy)) + { + // We do not report errors from reducing here. This is because we will + // "double-report" errors in some cases, like when trying to unify + // identical type family instantiations like Add with + // Add. + reduceFamilies(superTy, location, NotNull(types), builtinTypes, &log); + superTy = log.follow(superTy); + } + + if (log.get(subTy)) + { + reduceFamilies(subTy, location, NotNull(types), builtinTypes, &log); + subTy = log.follow(subTy); + } + + // If we can't reduce the families down and we still have type family types + // here, we are stuck. Nothing meaningful can be done here. We don't wish to + // report an error, either. + if (log.get(superTy) || log.get(subTy)) + { + return; + } + auto superFree = log.getMutable(superTy); auto subFree = log.getMutable(subTy); @@ -509,6 +535,49 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool return; } + if (hideousFixMeGenericsAreActuallyFree) + { + auto superGeneric = log.getMutable(superTy); + auto subGeneric = log.getMutable(subTy); + + if (superGeneric && subGeneric && subsumes(useScopes, superGeneric, subGeneric)) + { + if (!occursCheck(subTy, superTy, /* reversed = */ false)) + log.replace(subTy, BoundType(superTy)); + + return; + } + else if (superGeneric && subGeneric) + { + if (!occursCheck(superTy, subTy, /* reversed = */ true)) + log.replace(superTy, BoundType(subTy)); + + return; + } + else if (superGeneric) + { + if (!occursCheck(superTy, subTy, /* reversed = */ true)) + { + Widen widen{types, builtinTypes}; + log.replace(superTy, BoundType(widen(subTy))); + } + + return; + } + else if (subGeneric) + { + // Normally, if the subtype is free, it should not be bound to any, unknown, or error types. + // But for bug compatibility, we'll only apply this rule to unknown. Doing this will silence cascading type errors. + if (log.get(superTy)) + return; + + if (!occursCheck(subTy, superTy, /* reversed = */ false)) + log.replace(subTy, BoundType(superTy)); + + return; + } + } + if (log.get(superTy)) return tryUnifyWithAny(subTy, builtinTypes->anyType); @@ -687,8 +756,93 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool log.popSeen(superTy, subTy); } +/* + * If the passed type is an option, strip nil out. + * + * There is an important subtlety to be observed here: + * + * We want to do a peephole fix to unify the subtype relation A? <: B? where we + * instead peel off the options and relate A <: B instead, but only works if we + * are certain that neither A nor B are themselves optional. + * + * For instance, if we want to test that (boolean?)? <: boolean?, we must peel + * off both layers of optionality from the subTy. + * + * We must also handle unions that have more than two choices. + * + * eg (string | nil)? <: boolean? 
+ */ +static std::optional unwrapOption(NotNull builtinTypes, NotNull arena, const TxnLog& log, TypeId ty, DenseHashSet& seen) +{ + if (seen.find(ty)) + return std::nullopt; + seen.insert(ty); + + const UnionType* ut = get(follow(ty)); + if (!ut) + return std::nullopt; + + if (2 == ut->options.size()) + { + if (isNil(follow(ut->options[0]))) + { + std::optional doubleUnwrapped = unwrapOption(builtinTypes, arena, log, ut->options[1], seen); + return doubleUnwrapped.value_or(ut->options[1]); + } + if (isNil(follow(ut->options[1]))) + { + std::optional doubleUnwrapped = unwrapOption(builtinTypes, arena, log, ut->options[0], seen); + return doubleUnwrapped.value_or(ut->options[0]); + } + } + + std::set newOptions; + bool found = false; + for (TypeId t : ut) + { + t = log.follow(t); + if (isNil(t)) + { + found = true; + continue; + } + else + newOptions.insert(t); + } + + if (!found) + return std::nullopt; + else if (newOptions.empty()) + return builtinTypes->neverType; + else if (1 == newOptions.size()) + return *begin(newOptions); + else + return arena->addType(UnionType{std::vector(begin(newOptions), end(newOptions))}); +} + +static std::optional unwrapOption(NotNull builtinTypes, NotNull arena, const TxnLog& log, TypeId ty) +{ + DenseHashSet seen{nullptr}; + + return unwrapOption(builtinTypes, arena, log, ty, seen); +} + + void Unifier::tryUnifyUnionWithType(TypeId subTy, const UnionType* subUnion, TypeId superTy) { + // Peephole fix: A? <: B? if A <: B + // + // This works around issues that can arise if A or B is free. We do not + // want either of those types to be bound to nil. + if (FFlag::LuauUnifyTwoOptions) + { + if (auto subOption = unwrapOption(builtinTypes, NotNull{types}, log, subTy)) + { + if (auto superOption = unwrapOption(builtinTypes, NotNull{types}, log, superTy)) + return tryUnify_(*subOption, *superOption); + } + } + // A | B <: T if and only if A <: T and B <: T bool failed = false; bool errorsSuppressed = true; @@ -1205,6 +1359,25 @@ void Unifier::tryUnifyNormalizedTypes( const ClassType* superCtv = get(superClass); LUAU_ASSERT(superCtv); + if (FFlag::LuauUnifyTwoOptions) + { + if (variance == Invariant) + { + if (subCtv == superCtv) + { + found = true; + + /* + * The only way we could care about superNegations is if + * one of them was equal to superCtv. However, + * normalization ensures that this is impossible. 
+ */ + } + else + continue; + } + } + if (isSubclass(subCtv, superCtv)) { found = true; @@ -1518,6 +1691,12 @@ struct WeirdIter } }; +void Unifier::enableScopeTests() +{ + useScopes = true; + log.useScopes = true; +} + ErrorVec Unifier::canUnify(TypeId subTy, TypeId superTy) { Unifier s = makeChildUnifier(); @@ -1597,6 +1776,21 @@ void Unifier::tryUnify_(TypePackId subTp, TypePackId superTp, bool isFunctionCal log.replace(subTp, Unifiable::Bound(superTp)); } } + else if (hideousFixMeGenericsAreActuallyFree && log.getMutable(superTp)) + { + if (!occursCheck(superTp, subTp, /* reversed = */ true)) + { + Widen widen{types, builtinTypes}; + log.replace(superTp, Unifiable::Bound(widen(subTp))); + } + } + else if (hideousFixMeGenericsAreActuallyFree && log.getMutable(subTp)) + { + if (!occursCheck(subTp, superTp, /* reversed = */ false)) + { + log.replace(subTp, Unifiable::Bound(superTp)); + } + } else if (log.getMutable(superTp)) tryUnifyWithAny(subTp, superTp); else if (log.getMutable(subTp)) @@ -2611,7 +2805,10 @@ void Unifier::tryUnifyVariadics(TypePackId subTp, TypePackId superTp, bool rever } else if (get(tail)) { - reportError(location, GenericError{"Cannot unify variadic and generic packs"}); + if (!hideousFixMeGenericsAreActuallyFree) + reportError(location, GenericError{"Cannot unify variadic and generic packs"}); + else + log.replace(tail, BoundTypePack{superTp}); } else if (get(tail)) { @@ -2732,7 +2929,7 @@ std::optional Unifier::findTablePropertyRespectingMeta(TypeId lhsType, N TxnLog Unifier::combineLogsIntoIntersection(std::vector logs) { LUAU_ASSERT(FFlag::DebugLuauDeferredConstraintResolution); - TxnLog result; + TxnLog result(useScopes); for (TxnLog& log : logs) result.concatAsIntersections(std::move(log), NotNull{types}); return result; @@ -2741,7 +2938,7 @@ TxnLog Unifier::combineLogsIntoIntersection(std::vector logs) TxnLog Unifier::combineLogsIntoUnion(std::vector logs) { LUAU_ASSERT(FFlag::DebugLuauDeferredConstraintResolution); - TxnLog result; + TxnLog result(useScopes); for (TxnLog& log : logs) result.concatAsUnion(std::move(log), NotNull{types}); return result; @@ -2807,7 +3004,7 @@ bool Unifier::occursCheck(DenseHashSet& seen, TypeId needle, TypeId hays if (log.getMutable(needle)) return false; - if (!log.getMutable(needle)) + if (!log.getMutable(needle) && !(hideousFixMeGenericsAreActuallyFree && log.is(needle))) ice("Expected needle to be free"); if (needle == haystack) @@ -2821,7 +3018,7 @@ bool Unifier::occursCheck(DenseHashSet& seen, TypeId needle, TypeId hays return true; } - if (log.getMutable(haystack)) + if (log.getMutable(haystack) || (hideousFixMeGenericsAreActuallyFree && log.is(haystack))) return false; else if (auto a = log.getMutable(haystack)) { @@ -2865,7 +3062,7 @@ bool Unifier::occursCheck(DenseHashSet& seen, TypePackId needle, Typ if (log.getMutable(needle)) return false; - if (!log.getMutable(needle)) + if (!log.getMutable(needle) && !(hideousFixMeGenericsAreActuallyFree && log.is(needle))) ice("Expected needle pack to be free"); RecursionLimiter _ra(&sharedState.counters.recursionCount, sharedState.counters.recursionLimit); @@ -2900,7 +3097,10 @@ Unifier Unifier::makeChildUnifier() Unifier u = Unifier{normalizer, mode, scope, location, variance, &log}; u.normalize = normalize; u.checkInhabited = checkInhabited; - u.useScopes = useScopes; + + if (useScopes) + u.enableScopeTests(); + return u; } diff --git a/CLI/Repl.cpp b/CLI/Repl.cpp index 4303364..a585a73 100644 --- a/CLI/Repl.cpp +++ b/CLI/Repl.cpp @@ -27,6 +27,10 @@ #include #endif +#ifdef 
__linux__ +#include +#endif + #ifdef CALLGRIND #include #endif @@ -865,6 +869,7 @@ int replMain(int argc, char** argv) int profile = 0; bool coverage = false; bool interactive = false; + bool codegenPerf = false; // Set the mode if the user has explicitly specified one. int argStart = 1; @@ -962,6 +967,11 @@ int replMain(int argc, char** argv) { codegen = true; } + else if (strcmp(argv[i], "--codegen-perf") == 0) + { + codegen = true; + codegenPerf = true; + } else if (strcmp(argv[i], "--coverage") == 0) { coverage = true; @@ -998,6 +1008,24 @@ int replMain(int argc, char** argv) } #endif + if (codegenPerf) + { +#if __linux__ + char path[128]; + snprintf(path, sizeof(path), "/tmp/perf-%d.map", getpid()); + + // note, there's no need to close the log explicitly as it will be closed when the process exits + FILE* codegenPerfLog = fopen(path, "w"); + + Luau::CodeGen::setPerfLog(codegenPerfLog, [](void* context, uintptr_t addr, unsigned size, const char* symbol) { + fprintf(static_cast(context), "%016lx %08x %s\n", long(addr), size, symbol); + }); +#else + fprintf(stderr, "--codegen-perf option is only supported on Linux\n"); + return 1; +#endif + } + const std::vector files = getSourceFiles(argc, argv); if (mode == CliMode::Unknown) { diff --git a/CodeGen/include/Luau/AssemblyBuilderA64.h b/CodeGen/include/Luau/AssemblyBuilderA64.h index 26be11c..e7733cd 100644 --- a/CodeGen/include/Luau/AssemblyBuilderA64.h +++ b/CodeGen/include/Luau/AssemblyBuilderA64.h @@ -56,7 +56,7 @@ public: void eor(RegisterA64 dst, RegisterA64 src1, RegisterA64 src2, int shift = 0); void bic(RegisterA64 dst, RegisterA64 src1, RegisterA64 src2, int shift = 0); void tst(RegisterA64 src1, RegisterA64 src2, int shift = 0); - void mvn(RegisterA64 dst, RegisterA64 src); + void mvn_(RegisterA64 dst, RegisterA64 src); // Bitwise with immediate // Note: immediate must have a single contiguous sequence of 1 bits set of length 1..31 @@ -199,7 +199,7 @@ private: void placeR1(const char* name, RegisterA64 dst, RegisterA64 src, uint32_t op); void placeI12(const char* name, RegisterA64 dst, RegisterA64 src1, int src2, uint8_t op); void placeI16(const char* name, RegisterA64 dst, int src, uint8_t op, int shift = 0); - void placeA(const char* name, RegisterA64 dst, AddressA64 src, uint8_t op, uint8_t size, int sizelog); + void placeA(const char* name, RegisterA64 dst, AddressA64 src, uint16_t opsize, int sizelog); void placeB(const char* name, Label& label, uint8_t op); void placeBC(const char* name, Label& label, uint8_t op, uint8_t cond); void placeBCR(const char* name, Label& label, uint8_t op, RegisterA64 cond); diff --git a/CodeGen/include/Luau/AssemblyBuilderX64.h b/CodeGen/include/Luau/AssemblyBuilderX64.h index e162cd3..a372bf9 100644 --- a/CodeGen/include/Luau/AssemblyBuilderX64.h +++ b/CodeGen/include/Luau/AssemblyBuilderX64.h @@ -2,6 +2,7 @@ #pragma once #include "Luau/Common.h" +#include "Luau/DenseHash.h" #include "Luau/Label.h" #include "Luau/ConditionX64.h" #include "Luau/OperandX64.h" @@ -250,6 +251,8 @@ private: std::vector
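// --- Editorial sketch, not part of the patch ---------------------------------
// The --codegen-perf path added to the REPL earlier in this hunk records each
// generated code region as a line of "<start address> <size> <symbol>" in
// /tmp/perf-<pid>.map, which is the convention Linux `perf` uses to symbolize
// JIT-compiled code. A minimal standalone writer using the same format string
// the REPL installs via Luau::CodeGen::setPerfLog:
#include <cstdint>
#include <cstdio>

static void writePerfMapEntry(std::FILE* map, uintptr_t addr, unsigned size, const char* symbol)
{
    std::fprintf(map, "%016lx %08x %s\n", static_cast<unsigned long>(addr), size, symbol);
}

// Typical use: start the REPL with --codegen --codegen-perf under `perf record`,
// then inspect the attributed samples with `perf report`.
// -----------------------------------------------------------------------------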