// luau/Analysis/src/Module.cpp

// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/Module.h"
#include "Luau/Clone.h"
#include "Luau/Common.h"
#include "Luau/ConstraintGraphBuilder.h"
#include "Luau/Normalize.h"
#include "Luau/RecursionCounter.h"
#include "Luau/Scope.h"
#include "Luau/TypeInfer.h"
#include "Luau/TypePack.h"
#include "Luau/TypeVar.h"
#include "Luau/VisitTypeVar.h"
#include <algorithm>
LUAU_FASTFLAG(LuauLowerBoundsCalculation);
LUAU_FASTFLAG(LuauNormalizeFlagIsConservative);
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution);
LUAU_FASTFLAGVARIABLE(LuauForceExportSurfacesToBeNormal, false);
namespace Luau
{
// Returns true when `pos` falls within the span of `comment`.
static bool contains(Position pos, Comment comment)
{
    if (comment.location.contains(pos))
        return true;

    // A broken comment has no terminator, so it effectively extends from its
    // start position to the end of the file.
    if (comment.type == Lexeme::BrokenComment && comment.location.begin <= pos)
        return true;

    // An ordinary comment also claims the position immediately past its end.
    return comment.type == Lexeme::Comment && comment.location.end == pos;
}
bool isWithinComment(const SourceModule& sourceModule, Position pos)
{
auto iter = std::lower_bound(sourceModule.commentLocations.begin(), sourceModule.commentLocations.end(),
Comment{Lexeme::Comment, Location{pos, pos}}, [](const Comment& a, const Comment& b) {
return a.location.end < b.location.end;
});
if (iter == sourceModule.commentLocations.end())
return false;
if (contains(pos, *iter))
return true;
// Due to the nature of std::lower_bound, it is possible that iter points at a comment that ends
// at pos. We'll try the next comment, if it exists.
++iter;
if (iter == sourceModule.commentLocations.end())
return false;
return contains(pos, *iter);
}
struct ForceNormal : TypeVarOnceVisitor
{
const TypeArena* typeArena = nullptr;
ForceNormal(const TypeArena* typeArena)
: typeArena(typeArena)
{
}
2022-05-19 20:02:24 -04:00
bool visit(TypeId ty) override
{
if (ty->owningArena != typeArena)
return false;
2022-05-19 20:02:24 -04:00
asMutable(ty)->normal = true;
return true;
}
2022-05-19 20:02:24 -04:00
bool visit(TypeId ty, const FreeTypeVar& ftv) override
{
visit(ty);
return true;
}
2022-05-19 20:02:24 -04:00
bool visit(TypePackId tp, const FreeTypePack& ftp) override
{
return true;
}
};
Module::~Module()
{
    // Both arenas are frozen at the end of clonePublicInterface(); unfreeze
    // them here, presumably so their contents can be mutated/released during
    // destruction — NOTE(review): confirm against TypeArena's freeze semantics.
    unfreeze(interfaceTypes);
    unfreeze(internalTypes);
}
// Clones the module's public surface (return type, vararg pack, exported type
// bindings, declared globals) out of the internal arena into interfaceTypes,
// normalizes it when the relevant flags are set, and finally freezes both
// arenas so the exported types become immutable.
void Module::clonePublicInterface(InternalErrorReporter& ice)
{
    LUAU_ASSERT(interfaceTypes.typeVars.empty());
    LUAU_ASSERT(interfaceTypes.typePacks.empty());

    CloneState cloneState;

    ScopePtr moduleScope = getModuleScope();

    TypePackId returnType = moduleScope->returnType;
    // Under the deferred-constraint-resolution solver, vararg packs and
    // exported type bindings are not cloned here.
    std::optional<TypePackId> varargPack = FFlag::DebugLuauDeferredConstraintResolution ? std::nullopt : moduleScope->varargPack;
    std::unordered_map<Name, TypeFun>* exportedTypeBindings =
        FFlag::DebugLuauDeferredConstraintResolution ? nullptr : &moduleScope->exportedTypeBindings;

    returnType = clone(returnType, interfaceTypes, cloneState);

    moduleScope->returnType = returnType;
    if (varargPack)
    {
        varargPack = clone(*varargPack, interfaceTypes, cloneState);
        moduleScope->varargPack = varargPack;
    }

    ForceNormal forceNormal{&interfaceTypes};

    if (FFlag::LuauLowerBoundsCalculation)
    {
        normalize(returnType, interfaceTypes, ice);
        if (FFlag::LuauForceExportSurfacesToBeNormal)
            forceNormal.traverse(returnType);

        if (varargPack)
        {
            normalize(*varargPack, interfaceTypes, ice);
            if (FFlag::LuauForceExportSurfacesToBeNormal)
                forceNormal.traverse(*varargPack);
        }
    }

    if (exportedTypeBindings)
    {
        for (auto& [name, tf] : *exportedTypeBindings)
        {
            tf = clone(tf, interfaceTypes, cloneState);

            if (FFlag::LuauLowerBoundsCalculation)
            {
                normalize(tf.type, interfaceTypes, ice);

                if (FFlag::LuauNormalizeFlagIsConservative)
                {
                    // We're about to freeze the memory. We know that the flag is conservative by design. Cyclic tables
                    // won't be marked normal. If the types aren't normal by now, they never will be.
                    forceNormal.traverse(tf.type);

                    // const&: traversal/normalization mutate through the
                    // TypeIds, never the definition itself, so there is no
                    // reason to copy each GenericTypeDefinition per iteration.
                    for (const GenericTypeDefinition& param : tf.typeParams)
                    {
                        forceNormal.traverse(param.ty);

                        if (param.defaultValue)
                        {
                            normalize(*param.defaultValue, interfaceTypes, ice);
                            forceNormal.traverse(*param.defaultValue);
                        }
                    }
                }
            }
        }
    }

    // Generic types in the exported return pack are replaced with `any`,
    // presumably so unbound generics never leak into the public surface —
    // NOTE(review): confirm intent with the type-inference maintainers.
    for (TypeId ty : returnType)
    {
        if (get<GenericTypeVar>(follow(ty)))
        {
            auto t = asMutable(ty);
            t->ty = AnyTypeVar{};
            t->normal = true;
        }
    }

    for (auto& [name, ty] : declaredGlobals)
    {
        ty = clone(ty, interfaceTypes, cloneState);

        if (FFlag::LuauLowerBoundsCalculation)
        {
            normalize(ty, interfaceTypes, ice);
            if (FFlag::LuauForceExportSurfacesToBeNormal)
                forceNormal.traverse(ty);
        }
    }

    // From here on, neither arena may be mutated (until the destructor
    // unfreezes them).
    freeze(internalTypes);
    freeze(interfaceTypes);
}
// Returns the module-level (root) scope, which is always the first scope
// recorded for the module.
ScopePtr Module::getModuleScope() const
{
    LUAU_ASSERT(!scopes.empty());
    return scopes.front().second;
}
} // namespace Luau