luau/Analysis/src/Normalize.cpp

// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/Normalize.h"
#include <algorithm>
#include "Luau/Clone.h"
#include "Luau/Unifier.h"
#include "Luau/VisitTypeVar.h"
LUAU_FASTFLAGVARIABLE(DebugLuauCopyBeforeNormalizing, false)
// This could theoretically be 2000 on amd64, but x86 requires this.
LUAU_FASTINTVARIABLE(LuauNormalizeIterationLimit, 1200);
LUAU_FASTFLAGVARIABLE(LuauNormalizeCombineTableFix, false);
LUAU_FASTFLAGVARIABLE(LuauNormalizeFlagIsConservative, false);
LUAU_FASTFLAGVARIABLE(LuauFixNormalizationOfCyclicUnions, false);
LUAU_FASTFLAG(LuauUnknownAndNeverType)
LUAU_FASTFLAG(LuauQuantifyConstrained)
namespace Luau
{
namespace
{
struct Replacer
{
TypeArena* arena;
TypeId sourceType;
TypeId replacedType;
DenseHashMap<TypeId, TypeId> newTypes;
Replacer(TypeArena* arena, TypeId sourceType, TypeId replacedType)
: arena(arena)
, sourceType(sourceType)
, replacedType(replacedType)
, newTypes(nullptr)
{
}
TypeId smartClone(TypeId t)
{
t = follow(t);
TypeId* res = newTypes.find(t);
if (res)
return *res;
TypeId result = shallowClone(t, *arena, TxnLog::empty());
newTypes[t] = result;
newTypes[result] = result;
return result;
}
};
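// Illustrative sketch (not part of the build; `arena` and `t` are placeholders):
// smartClone memoizes its results, so cloning the same type twice yields the same
// fresh TypeId, and cloning a type that smartClone itself produced is the identity:
//
//     Replacer r{&arena, /*sourceType*/ nullptr, /*replacedType*/ nullptr};
//     TypeId c1 = r.smartClone(t);
//     TypeId c2 = r.smartClone(t);  // c1 == c2, via the newTypes map
//     TypeId c3 = r.smartClone(c1); // c3 == c1, because results map to themselves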
} // anonymous namespace
bool isSubtype(TypeId subTy, TypeId superTy, InternalErrorReporter& ice)
{
UnifierSharedState sharedState{&ice};
TypeArena arena;
Unifier u{&arena, Mode::Strict, Location{}, Covariant, sharedState};
u.anyIsTop = true;
u.tryUnify(subTy, superTy);
const bool ok = u.errors.empty() && u.log.empty();
return ok;
}
bool isSubtype(TypePackId subPack, TypePackId superPack, InternalErrorReporter& ice)
{
UnifierSharedState sharedState{&ice};
TypeArena arena;
Unifier u{&arena, Mode::Strict, Location{}, Covariant, sharedState};
u.anyIsTop = true;
u.tryUnify(subPack, superPack);
const bool ok = u.errors.empty() && u.log.empty();
return ok;
}
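// Usage sketch (illustrative; numberType and numberOrStringType are placeholder
// TypeIds): subtyping is decided by running a throwaway covariant unification and
// requiring that it neither reports errors nor records any changes.
//
//     bool yes = isSubtype(numberType, numberOrStringType, ice); // number <: number | string
//     bool no = isSubtype(numberOrStringType, numberType, ice);  // the converse fails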
template<typename T>
static bool areNormal_(const T& t, const std::unordered_set<void*>& seen, InternalErrorReporter& ice)
{
int count = 0;
auto isNormal = [&](TypeId ty) {
++count;
if (count >= FInt::LuauNormalizeIterationLimit)
ice.ice("Luau::areNormal hit iteration limit");
if (FFlag::LuauNormalizeFlagIsConservative)
return ty->normal;
else
{
// The follow is here because a bound type may not itself be flagged normal, but the type it is bound to may be.
return ty->normal || follow(ty)->normal || seen.find(asMutable(ty)) != seen.end();
}
};
return std::all_of(begin(t), end(t), isNormal);
}
static bool areNormal(const std::vector<TypeId>& types, const std::unordered_set<void*>& seen, InternalErrorReporter& ice)
{
return areNormal_(types, seen, ice);
}
static bool areNormal(TypePackId tp, const std::unordered_set<void*>& seen, InternalErrorReporter& ice)
{
tp = follow(tp);
if (get<FreeTypePack>(tp))
return false;
auto [head, tail] = flatten(tp);
if (!areNormal_(head, seen, ice))
return false;
if (!tail)
return true;
if (auto vtp = get<VariadicTypePack>(*tail))
return vtp->ty->normal || follow(vtp->ty)->normal || seen.find(asMutable(vtp->ty)) != seen.end();
return true;
}
#define CHECK_ITERATION_LIMIT(...) \
do \
{ \
if (iterationLimit > FInt::LuauNormalizeIterationLimit) \
{ \
limitExceeded = true; \
return __VA_ARGS__; \
} \
++iterationLimit; \
} while (false)
struct Normalize final : TypeVarVisitor
{
using TypeVarVisitor::Set;
Normalize(TypeArena& arena, InternalErrorReporter& ice)
: arena(arena)
, ice(ice)
{
}
TypeArena& arena;
InternalErrorReporter& ice;
int iterationLimit = 0;
bool limitExceeded = false;
bool visit(TypeId ty, const FreeTypeVar&) override
{
LUAU_ASSERT(!ty->normal);
return false;
}
bool visit(TypeId ty, const BoundTypeVar& btv) override
{
// A type may look normal while it is still on the traversal stack, only for normalization to later discover that it is not.
// So we must avoid eagerly marking this bound type normal while the type it is bound to is still on the stack.
if (seen.find(asMutable(btv.boundTo)) != seen.end())
return false;
// Except in pathological cases, this TypeVar should never be flagged normal while the type it is bound to is not.
LUAU_ASSERT(!ty->normal || ty->normal == btv.boundTo->normal);
asMutable(ty)->normal = btv.boundTo->normal;
return !ty->normal;
}
bool visit(TypeId ty, const PrimitiveTypeVar&) override
{
LUAU_ASSERT(ty->normal);
return false;
}
bool visit(TypeId ty, const GenericTypeVar&) override
{
if (!ty->normal)
asMutable(ty)->normal = true;
return false;
}
bool visit(TypeId ty, const ErrorTypeVar&) override
{
if (!ty->normal)
asMutable(ty)->normal = true;
return false;
}
bool visit(TypeId ty, const UnknownTypeVar&) override
{
if (!ty->normal)
asMutable(ty)->normal = true;
return false;
}
bool visit(TypeId ty, const NeverTypeVar&) override
{
if (!ty->normal)
asMutable(ty)->normal = true;
return false;
}
bool visit(TypeId ty, const ConstrainedTypeVar& ctvRef) override
{
CHECK_ITERATION_LIMIT(false);
LUAU_ASSERT(!ty->normal);
ConstrainedTypeVar* ctv = const_cast<ConstrainedTypeVar*>(&ctvRef);
std::vector<TypeId> parts = std::move(ctv->parts);
// We might transmute, so it's not safe to rely on the builtin traversal logic of visitTypeVar
for (TypeId part : parts)
traverse(part);
std::vector<TypeId> newParts = normalizeUnion(parts);
if (FFlag::LuauQuantifyConstrained)
{
ctv->parts = std::move(newParts);
}
else
{
const bool normal = areNormal(newParts, seen, ice);
if (newParts.size() == 1)
*asMutable(ty) = BoundTypeVar{newParts[0]};
else
*asMutable(ty) = UnionTypeVar{std::move(newParts)};
asMutable(ty)->normal = normal;
}
return false;
}
bool visit(TypeId ty, const FunctionTypeVar& ftv) override
{
CHECK_ITERATION_LIMIT(false);
if (ty->normal)
return false;
traverse(ftv.argTypes);
traverse(ftv.retTypes);
asMutable(ty)->normal = areNormal(ftv.argTypes, seen, ice) && areNormal(ftv.retTypes, seen, ice);
return false;
}
bool visit(TypeId ty, const TableTypeVar& ttv) override
{
CHECK_ITERATION_LIMIT(false);
if (ty->normal)
return false;
bool normal = true;
auto checkNormal = [&](TypeId t) {
// If t is on the stack, it is still possible that this type is normal.
// If t is not normal and not on the stack, this type is definitely not normal.
if (!t->normal && seen.find(asMutable(t)) == seen.end())
normal = false;
};
if (ttv.boundTo)
{
traverse(*ttv.boundTo);
asMutable(ty)->normal = (*ttv.boundTo)->normal;
return false;
}
for (const auto& [_name, prop] : ttv.props)
{
traverse(prop.type);
checkNormal(prop.type);
}
if (ttv.indexer)
{
traverse(ttv.indexer->indexType);
checkNormal(ttv.indexer->indexType);
traverse(ttv.indexer->indexResultType);
checkNormal(ttv.indexer->indexResultType);
}
// An unsealed table can never be normal, and a free table is normal only if the type it resolves to is normal.
if (FFlag::LuauQuantifyConstrained)
{
if (ttv.state == TableState::Generic || ttv.state == TableState::Sealed || (ttv.state == TableState::Free && follow(ty)->normal))
asMutable(ty)->normal = normal;
}
else
asMutable(ty)->normal = normal;
return false;
}
bool visit(TypeId ty, const MetatableTypeVar& mtv) override
{
CHECK_ITERATION_LIMIT(false);
if (ty->normal)
return false;
traverse(mtv.table);
traverse(mtv.metatable);
asMutable(ty)->normal = mtv.table->normal && mtv.metatable->normal;
return false;
}
bool visit(TypeId ty, const ClassTypeVar& ctv) override
{
if (!ty->normal)
asMutable(ty)->normal = true;
return false;
}
bool visit(TypeId ty, const AnyTypeVar&) override
{
LUAU_ASSERT(ty->normal);
return false;
}
bool visit(TypeId ty, const UnionTypeVar& utvRef) override
{
CHECK_ITERATION_LIMIT(false);
if (ty->normal)
return false;
UnionTypeVar* utv = &const_cast<UnionTypeVar&>(utvRef);
// TODO: Clip tempOptions and optionsRef when clipping FFlag::LuauFixNormalizationOfCyclicUnions
std::vector<TypeId> tempOptions;
if (!FFlag::LuauFixNormalizationOfCyclicUnions)
tempOptions = std::move(utv->options);
std::vector<TypeId>& optionsRef = FFlag::LuauFixNormalizationOfCyclicUnions ? utv->options : tempOptions;
// We might transmute, so it's not safe to rely on the builtin traversal logic of visitTypeVar
for (TypeId option : optionsRef)
traverse(option);
std::vector<TypeId> newOptions = normalizeUnion(optionsRef);
const bool normal = areNormal(newOptions, seen, ice);
LUAU_ASSERT(!newOptions.empty());
if (newOptions.size() == 1)
*asMutable(ty) = BoundTypeVar{newOptions[0]};
else
utv->options = std::move(newOptions);
asMutable(ty)->normal = normal;
return false;
}
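// Worked example (illustrative): normalizing the union `"hello" | string` first
// traverses both options, then normalizeUnion collapses `"hello"` into `string`
// because it is a subtype of it. Left with a single option, the UnionTypeVar node
// is transmuted in place into BoundTypeVar{string}.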
bool visit(TypeId ty, const IntersectionTypeVar& itvRef) override
{
CHECK_ITERATION_LIMIT(false);
if (ty->normal)
return false;
IntersectionTypeVar* itv = &const_cast<IntersectionTypeVar&>(itvRef);
if (FFlag::LuauFixNormalizationOfCyclicUnions)
{
std::vector<TypeId> oldParts = itv->parts;
IntersectionTypeVar newIntersection;
for (TypeId part : oldParts)
traverse(part);
std::vector<TypeId> tables;
for (TypeId part : oldParts)
{
part = follow(part);
if (get<TableTypeVar>(part))
tables.push_back(part);
else
{
Replacer replacer{&arena, nullptr, nullptr}; // FIXME this is super super WEIRD
combineIntoIntersection(replacer, &newIntersection, part);
}
}
// Don't allocate a new table if there's just one in the intersection.
if (tables.size() == 1)
newIntersection.parts.push_back(tables[0]);
else if (!tables.empty())
{
const TableTypeVar* first = get<TableTypeVar>(tables[0]);
LUAU_ASSERT(first);
TypeId newTable = arena.addType(TableTypeVar{first->state, first->level});
TableTypeVar* ttv = getMutable<TableTypeVar>(newTable);
for (TypeId part : tables)
{
// Intuition: If combineIntoTable() needs to clone a table, any references to 'part' are cyclic and need
// to be rewritten to point at 'newTable' in the clone.
Replacer replacer{&arena, part, newTable};
combineIntoTable(replacer, ttv, part);
}
newIntersection.parts.push_back(newTable);
}
itv->parts = std::move(newIntersection.parts);
asMutable(ty)->normal = areNormal(itv->parts, seen, ice);
if (itv->parts.size() == 1)
{
TypeId part = itv->parts[0];
*asMutable(ty) = BoundTypeVar{part};
}
}
else
{
std::vector<TypeId> oldParts = std::move(itv->parts);
for (TypeId part : oldParts)
traverse(part);
std::vector<TypeId> tables;
for (TypeId part : oldParts)
{
part = follow(part);
if (get<TableTypeVar>(part))
tables.push_back(part);
else
{
Replacer replacer{&arena, nullptr, nullptr}; // FIXME this is super super WEIRD
combineIntoIntersection(replacer, itv, part);
}
}
// Don't allocate a new table if there's just one in the intersection.
if (tables.size() == 1)
itv->parts.push_back(tables[0]);
else if (!tables.empty())
{
const TableTypeVar* first = get<TableTypeVar>(tables[0]);
LUAU_ASSERT(first);
TypeId newTable = arena.addType(TableTypeVar{first->state, first->level});
TableTypeVar* ttv = getMutable<TableTypeVar>(newTable);
for (TypeId part : tables)
{
// Intuition: If combineIntoTable() needs to clone a table, any references to 'part' are cyclic and need
// to be rewritten to point at 'newTable' in the clone.
Replacer replacer{&arena, part, newTable};
combineIntoTable(replacer, ttv, part);
}
itv->parts.push_back(newTable);
}
asMutable(ty)->normal = areNormal(itv->parts, seen, ice);
if (itv->parts.size() == 1)
{
TypeId part = itv->parts[0];
*asMutable(ty) = BoundTypeVar{part};
}
}
return false;
}
std::vector<TypeId> normalizeUnion(const std::vector<TypeId>& options)
{
if (options.size() == 1)
return options;
std::vector<TypeId> result;
for (TypeId part : options)
{
// AnyTypeVar always wins the battle no matter what we do, so we're done.
if (FFlag::LuauUnknownAndNeverType && get<AnyTypeVar>(follow(part)))
return {part};
combineIntoUnion(result, part);
}
return result;
}
void combineIntoUnion(std::vector<TypeId>& result, TypeId ty)
{
ty = follow(ty);
if (auto utv = get<UnionTypeVar>(ty))
{
for (TypeId t : utv)
{
// AnyTypeVar always wins the battle no matter what we do, so we're done.
if (FFlag::LuauUnknownAndNeverType && get<AnyTypeVar>(t))
{
result = {t};
return;
}
combineIntoUnion(result, t);
}
return;
}
for (TypeId& part : result)
{
if (isSubtype(ty, part, ice))
return; // no need to do anything
else if (isSubtype(part, ty, ice))
{
part = ty; // replace the less general type by the more general one
return;
}
}
result.push_back(ty);
}
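// Worked example (illustrative): combining the options of `number | string | "hello"`
// into an initially empty result:
//   - `number`:  nothing to compare against, so result = {number}
//   - `string`:  neither subsumes number nor is subsumed by it, so result = {number, string}
//   - `"hello"`: a subtype of the existing string part, so it is absorbed
// Final result: {number, string}.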
/**
* @param replacer knows how to clone a type such that any recursive references point at the new containing type.
* @param result is an intersection that is safe for us to mutate in-place.
*/
void combineIntoIntersection(Replacer& replacer, IntersectionTypeVar* result, TypeId ty)
{
// Note: this check guards against running out of stack space
// so if you increase the size of a stack frame, you'll need to decrease the limit.
CHECK_ITERATION_LIMIT();
ty = follow(ty);
if (auto itv = get<IntersectionTypeVar>(ty))
{
for (TypeId part : itv->parts)
combineIntoIntersection(replacer, result, part);
return;
}
// We maintain the invariant that the last part of our result intersection is a table whenever any table is part of this intersection
if (get<TableTypeVar>(ty))
{
if (result->parts.empty())
result->parts.push_back(arena.addType(TableTypeVar{TableState::Sealed, TypeLevel{}}));
TypeId theTable = result->parts.back();
if (!get<TableTypeVar>(follow(theTable)))
{
result->parts.push_back(arena.addType(TableTypeVar{TableState::Sealed, TypeLevel{}}));
theTable = result->parts.back();
}
TypeId newTable = replacer.smartClone(theTable);
result->parts.back() = newTable;
combineIntoTable(replacer, getMutable<TableTypeVar>(newTable), ty);
}
else if (auto ftv = get<FunctionTypeVar>(ty))
{
bool merged = false;
for (TypeId& part : result->parts)
{
if (isSubtype(part, ty, ice))
{
merged = true;
break; // no need to do anything
}
else if (isSubtype(ty, part, ice))
{
merged = true;
part = ty; // replace the less general type by the more general one
break;
}
}
if (!merged)
result->parts.push_back(ty);
}
else
result->parts.push_back(ty);
}
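// Worked example (illustrative): feeding `f` (a function type), `{ x: number }`,
// and `{ y: string }` through combineIntoIntersection leaves result->parts as
// {f, t}, where t is one merged table `{ x: number, y: string }`: table parts are
// funneled into the trailing table, while a function is only appended when no
// existing part subsumes it (and it replaces any existing part that it subsumes).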
TableState combineTableStates(TableState lhs, TableState rhs)
{
if (lhs == rhs)
return lhs;
if (lhs == TableState::Free || rhs == TableState::Free)
return TableState::Free;
if (lhs == TableState::Unsealed || rhs == TableState::Unsealed)
return TableState::Unsealed;
return lhs;
}
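// Table states combine by a simple preference order: Free beats Unsealed, which
// beats everything else; otherwise the left-hand state wins. E.g. (illustrative):
//
//     combineTableStates(TableState::Sealed, TableState::Free);     // Free
//     combineTableStates(TableState::Sealed, TableState::Unsealed); // Unsealed
//     combineTableStates(TableState::Generic, TableState::Sealed);  // Generic (lhs)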
/**
* @param replacer gives us a way to clone a type such that recursive references are rewritten to the new
* "containing" type.
* @param table always points into a table that is safe for us to mutate.
*/
void combineIntoTable(Replacer& replacer, TableTypeVar* table, TypeId ty)
{
// Note: this check guards against running out of stack space
// so if you increase the size of a stack frame, you'll need to decrease the limit.
CHECK_ITERATION_LIMIT();
LUAU_ASSERT(table);
ty = follow(ty);
TableTypeVar* tyTable = getMutable<TableTypeVar>(ty);
LUAU_ASSERT(tyTable);
for (const auto& [propName, prop] : tyTable->props)
{
if (auto it = table->props.find(propName); it != table->props.end())
{
/**
* If we are going to recursively merge intersections of tables, we need to ensure that we never mutate
* a table that comes from somewhere else in the type graph.
*
* smartClone() does some nice things for us: It will perform a clone that is as shallow as possible
* while still rewriting any cyclic references back to the new 'root' table.
*
* replacer also keeps a mapping of types that have previously been copied, so we have the added
* advantage here of knowing that, whether or not a new copy was actually made, the resulting TypeVar is
* safe for us to mutate in-place.
*/
TypeId clone = replacer.smartClone(it->second.type);
it->second.type = combine(replacer, clone, prop.type);
}
else
table->props.insert({propName, prop});
}
if (FFlag::LuauFixNormalizationOfCyclicUnions)
{
if (tyTable->indexer)
{
if (table->indexer)
{
table->indexer->indexType = combine(replacer, replacer.smartClone(tyTable->indexer->indexType), table->indexer->indexType);
table->indexer->indexResultType =
combine(replacer, replacer.smartClone(tyTable->indexer->indexResultType), table->indexer->indexResultType);
}
else
{
table->indexer =
TableIndexer{replacer.smartClone(tyTable->indexer->indexType), replacer.smartClone(tyTable->indexer->indexResultType)};
}
}
}
table->state = combineTableStates(table->state, tyTable->state);
table->level = max(table->level, tyTable->level);
}
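// Worked example (illustrative): merging `{ x: number, y: string }` into a table
// that already has `x: string` smartClones the existing `x` property (so it is
// safe to mutate) and combines the two, giving roughly `{ x: string & number, y: string }`:
// shared props are intersected via combine(), and new props are copied over.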
/**
* @param a is always cloned by the caller. It is safe to mutate in-place.
* @param b will never be mutated.
*/
TypeId combine(Replacer& replacer, TypeId a, TypeId b)
{
b = follow(b);
if (FFlag::LuauNormalizeCombineTableFix && a == b)
return a;
if (!get<IntersectionTypeVar>(a) && !get<TableTypeVar>(a))
{
if (!FFlag::LuauNormalizeCombineTableFix && a == b)
return a;
else
return arena.addType(IntersectionTypeVar{{a, b}});
}
if (auto itv = getMutable<IntersectionTypeVar>(a))
{
combineIntoIntersection(replacer, itv, b);
return a;
}
else if (auto ttv = getMutable<TableTypeVar>(a))
{
if (FFlag::LuauNormalizeCombineTableFix && !get<TableTypeVar>(b))
return arena.addType(IntersectionTypeVar{{a, b}});
combineIntoTable(replacer, ttv, b);
return a;
}
LUAU_ASSERT(!"Impossible");
LUAU_UNREACHABLE();
}
};
#undef CHECK_ITERATION_LIMIT
/**
* @returns A pair of TypeId and a success indicator (true indicates that the normalization completed successfully)
*/
std::pair<TypeId, bool> normalize(TypeId ty, TypeArena& arena, InternalErrorReporter& ice)
{
CloneState state;
if (FFlag::DebugLuauCopyBeforeNormalizing)
(void)clone(ty, arena, state);
Normalize n{arena, ice};
n.traverse(ty);
return {ty, !n.limitExceeded};
}
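// Usage sketch (illustrative; someTy is a placeholder TypeId):
//
//     TypeArena arena;
//     InternalErrorReporter ice;
//     auto [resultTy, succeeded] = normalize(someTy, arena, ice);
//     if (!succeeded)
//     {
//         // The iteration limit was hit; resultTy may be only partially normalized.
//     }
//
// Normalization mutates the type graph in place, so the returned TypeId is the
// same someTy that was passed in.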
// TODO: Think about using a temporary arena and cloning types out of it so that we
// reclaim memory used by wantonly allocated intermediate types here.
// The main wrinkle here is that we don't want clone() to copy a type if the source and dest
// arena are the same.
std::pair<TypeId, bool> normalize(TypeId ty, const ModulePtr& module, InternalErrorReporter& ice)
{
return normalize(ty, module->internalTypes, ice);
}
/**
* @returns A pair of TypePackId and a success indicator (true indicates that the normalization completed successfully)
*/
std::pair<TypePackId, bool> normalize(TypePackId tp, TypeArena& arena, InternalErrorReporter& ice)
{
CloneState state;
if (FFlag::DebugLuauCopyBeforeNormalizing)
(void)clone(tp, arena, state);
Normalize n{arena, ice};
n.traverse(tp);
return {tp, !n.limitExceeded};
}
std::pair<TypePackId, bool> normalize(TypePackId tp, const ModulePtr& module, InternalErrorReporter& ice)
{
return normalize(tp, module->internalTypes, ice);
}
} // namespace Luau