Sync to upstream/release/575 (#919)

* `Luau.Analyze.CLI` now has experimental support for concurrent type
checking. Use the option `-jN`, where `N` is the number of threads to
spawn; a sketch of the underlying `Frontend` API follows this list.
* Improve typechecking performance by ~17% by making the function
`Luau::follow` much more efficient.
* Tighten up the type of `os.date`
* Removed `ParseOptions::allowTypeAnnotations` and
`ParseOptions::supportContinueStatement`
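
The `-jN` mode presumably sits on top of the new `Frontend` batch-checking entry points added in this sync (`queueModuleCheck`, `checkQueuedModules`, `getCheckResult`; see the `Frontend` hunks further down). A minimal embedder-side sketch, assuming an already-constructed `Luau::Frontend` whose resolvers can find the queued names; a real embedder would hand tasks to a bounded thread pool rather than detaching one thread per task:

```cpp
#include "Luau/Frontend.h"

#include <cstdio>
#include <functional>
#include <optional>
#include <thread>
#include <vector>

std::vector<Luau::ModuleName> checkConcurrently(Luau::Frontend& frontend, const std::vector<Luau::ModuleName>& roots)
{
    // Queue the root modules; their dirty dependencies are pulled into the build queue automatically.
    frontend.queueModuleCheck(roots);

    // checkQueuedModules blocks until every queued item has been checked.
    // executeTask may run its task on any thread and return immediately.
    return frontend.checkQueuedModules(
        /*optionOverride*/ std::nullopt,
        [](std::function<void()> task) {
            std::thread(std::move(task)).detach();
        },
        [](size_t done, size_t total) {
            std::printf("checked %zu/%zu modules\n", done, total);
        });
}
```

Per-module results can then be pulled with `getCheckResult(name, /*accumulateNested*/ false)`.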

New solver

* Improve the reliability of function overload resolution
* More work toward supporting parallel type checking
* Fix a bug in inference of `==` and `~=` which would erroneously infer
that the operands were `boolean`
* Better error reporting when `for...in` loops are used incorrectly.

CodeGen

* Fix unwind registration when libunwind is used on Linux
* Fixed use count tracking for replaced IR instructions
* Convert X64 unwind info generation to standard prologue
* Implement A64 unwind info support for Dwarf2
* Live in/out data for linear blocks is now created
* Add side-exit VM register requirements to the IR dump
* Reuse ConstPropState between block chains 
* Remove redundant base update

---------

Co-authored-by: Arseny Kapoulkine <arseny.kapoulkine@gmail.com>
Co-authored-by: Vyacheslav Egorov <vegorov@roblox.com>
Andy Friesen, 2023-05-05 14:52:49 -07:00, committed via GitHub
parent 5891de6724
commit 8453570658
56 changed files with 1920 additions and 1058 deletions


@ -107,6 +107,11 @@ struct FunctionCallConstraint
TypePackId result;
class AstExprCall* callSite;
std::vector<std::optional<TypeId>> discriminantTypes;
// When we dispatch this constraint, we update the key at this map to record
// the overload that we selected.
DenseHashMap<const AstNode*, TypeId>* astOriginalCallTypes;
DenseHashMap<const AstNode*, TypeId>* astOverloadResolvedTypes;
};
// result ~ prim ExpectedType SomeSingletonType MultitonType
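
These two map pointers let the solver record, per call site, which overload it committed to. After a check run with `FrontendOptions::retainFullTypeGraphs` set (otherwise the maps are cleared, as the `Frontend` hunk further down shows), tooling can read the choice back out of the module. An illustrative sketch only; `selectedOverload` is a hypothetical helper, not part of this diff:

```cpp
#include "Luau/Ast.h"
#include "Luau/Module.h"
#include "Luau/ToString.h"

#include <string>

// Returns a printable form of the overload the solver selected at `call`,
// or a placeholder if nothing was recorded for that call site.
std::string selectedOverload(const Luau::ModulePtr& module, const Luau::AstExprCall* call)
{
    if (const Luau::TypeId* ty = module->astOverloadResolvedTypes.find(call))
        return Luau::toString(*ty);

    return "<no overload recorded>";
}
```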


@ -28,6 +28,7 @@ struct FileResolver;
struct ModuleResolver;
struct ParseResult;
struct HotComment;
struct BuildQueueItem;
struct LoadDefinitionFileResult
{
@ -171,7 +172,18 @@ struct Frontend
LoadDefinitionFileResult loadDefinitionFile(GlobalTypes& globals, ScopePtr targetScope, std::string_view source, const std::string& packageName,
bool captureComments, bool typeCheckForAutocomplete = false);
// Batch module checking. Queue modules and check them together, retrieve results with 'getCheckResult'
// If provided, 'executeTask' function is allowed to call the 'task' function on any thread and return without waiting for 'task' to complete
void queueModuleCheck(const std::vector<ModuleName>& names);
void queueModuleCheck(const ModuleName& name);
std::vector<ModuleName> checkQueuedModules(std::optional<FrontendOptions> optionOverride = {},
std::function<void(std::function<void()> task)> executeTask = {}, std::function<void(size_t done, size_t total)> progress = {});
std::optional<CheckResult> getCheckResult(const ModuleName& name, bool accumulateNested, bool forAutocomplete = false);
private:
CheckResult check_DEPRECATED(const ModuleName& name, std::optional<FrontendOptions> optionOverride = {});
struct TypeCheckLimits
{
std::optional<double> finishTime;
@ -185,7 +197,14 @@ private:
std::pair<SourceNode*, SourceModule*> getSourceNode(const ModuleName& name);
SourceModule parse(const ModuleName& name, std::string_view src, const ParseOptions& parseOptions);
bool parseGraph(std::vector<ModuleName>& buildQueue, const ModuleName& root, bool forAutocomplete);
bool parseGraph(
std::vector<ModuleName>& buildQueue, const ModuleName& root, bool forAutocomplete, std::function<bool(const ModuleName&)> canSkip = {});
void addBuildQueueItems(std::vector<BuildQueueItem>& items, std::vector<ModuleName>& buildQueue, bool cycleDetected,
std::unordered_set<Luau::ModuleName>& seen, const FrontendOptions& frontendOptions);
void checkBuildQueueItem(BuildQueueItem& item);
void checkBuildQueueItems(std::vector<BuildQueueItem>& items);
void recordItemResult(const BuildQueueItem& item);
static LintResult classifyLints(const std::vector<LintWarning>& warnings, const Config& config);
@ -212,11 +231,13 @@ public:
InternalErrorReporter iceHandler;
std::function<void(const ModuleName& name, const ScopePtr& scope, bool forAutocomplete)> prepareModuleScope;
std::unordered_map<ModuleName, SourceNode> sourceNodes;
std::unordered_map<ModuleName, SourceModule> sourceModules;
std::unordered_map<ModuleName, std::shared_ptr<SourceNode>> sourceNodes;
std::unordered_map<ModuleName, std::shared_ptr<SourceModule>> sourceModules;
std::unordered_map<ModuleName, RequireTraceResult> requireTrace;
Stats stats = {};
std::vector<ModuleName> moduleQueue;
};
ModulePtr check(const SourceModule& sourceModule, const std::vector<RequireCycle>& requireCycles, NotNull<BuiltinTypes> builtinTypes,


@ -226,10 +226,6 @@ struct NormalizedType
NormalizedClassType classes;
// The class part of the type.
// Each element of this set is a class, and none of the classes are subclasses of each other.
TypeIds DEPRECATED_classes;
// The error part of the type.
// This type is either never or the error type.
TypeId errors;
@ -333,8 +329,6 @@ public:
// ------- Normalizing intersections
TypeId intersectionOfTops(TypeId here, TypeId there);
TypeId intersectionOfBools(TypeId here, TypeId there);
void DEPRECATED_intersectClasses(TypeIds& heres, const TypeIds& theres);
void DEPRECATED_intersectClassesWithClass(TypeIds& heres, TypeId there);
void intersectClasses(NormalizedClassType& heres, const NormalizedClassType& theres);
void intersectClassesWithClass(NormalizedClassType& heres, TypeId there);
void intersectStrings(NormalizedStringType& here, const NormalizedStringType& there);


@ -694,7 +694,7 @@ bool areEqual(SeenSet& seen, const Type& lhs, const Type& rhs);
// Follow BoundTypes until we get to something real
TypeId follow(TypeId t);
TypeId follow(TypeId t, std::function<TypeId(TypeId)> mapper);
TypeId follow(TypeId t, const void* context, TypeId (*mapper)(const void*, TypeId));
std::vector<TypeId> flattenIntersection(TypeId ty);
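
The ~17% typechecking win cited in the changelog is attributed to making `follow` cheaper; as part of that, the `std::function`-based mapper overload gives way to an opaque context pointer plus a plain function pointer, so callers can redirect types through a side table without allocating a closure. A minimal sketch of the new overload, assuming release/575 headers; the `Substitutions` table and `followWithSubstitutions` helper are purely illustrative, not part of this diff:

```cpp
#include "Luau/DenseHash.h"
#include "Luau/Type.h"

using Substitutions = Luau::DenseHashMap<Luau::TypeId, Luau::TypeId>;

// Follows `ty`, redirecting each type it visits through `subst` first.
// The capture-less lambda converts to the plain function pointer the new
// overload expects, so no std::function is constructed on the hot path.
Luau::TypeId followWithSubstitutions(const Substitutions& subst, Luau::TypeId ty)
{
    return Luau::follow(ty, &subst, [](const void* ctx, Luau::TypeId t) -> Luau::TypeId {
        const auto* map = static_cast<const Substitutions*>(ctx);
        if (const Luau::TypeId* mapped = map->find(t))
            return *mapped;
        return t;
    });
}
```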


@ -169,7 +169,7 @@ using SeenSet = std::set<std::pair<const void*, const void*>>;
bool areEqual(SeenSet& seen, const TypePackVar& lhs, const TypePackVar& rhs);
TypePackId follow(TypePackId tp);
TypePackId follow(TypePackId tp, std::function<TypePackId(TypePackId)> mapper);
TypePackId follow(TypePackId t, const void* context, TypePackId (*mapper)(const void*, TypePackId));
size_t size(TypePackId tp, TxnLog* log = nullptr);
bool finite(TypePackId tp, TxnLog* log = nullptr);


@ -163,5 +163,6 @@ private:
void promoteTypeLevels(TxnLog& log, const TypeArena* arena, TypeLevel minLevel, Scope* outerScope, bool useScope, TypePackId tp);
std::optional<TypeError> hasUnificationTooComplex(const ErrorVec& errors);
std::optional<TypeError> hasCountMismatch(const ErrorVec& errors);
} // namespace Luau


@ -18,7 +18,6 @@
LUAU_FASTINT(LuauCheckRecursionLimit);
LUAU_FASTFLAG(DebugLuauMagicTypes);
LUAU_FASTFLAG(LuauNegatedClassTypes);
namespace Luau
{
@ -1016,7 +1015,7 @@ static bool isMetamethod(const Name& name)
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareClass* declaredClass)
{
std::optional<TypeId> superTy = FFlag::LuauNegatedClassTypes ? std::make_optional(builtinTypes->classType) : std::nullopt;
std::optional<TypeId> superTy = std::make_optional(builtinTypes->classType);
if (declaredClass->superName)
{
Name superName = Name(declaredClass->superName->value);
@ -1420,6 +1419,8 @@ InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExprCa
rets,
call,
std::move(discriminantTypes),
&module->astOriginalCallTypes,
&module->astOverloadResolvedTypes,
});
// We force constraints produced by checking function arguments to wait
@ -1772,7 +1773,7 @@ std::tuple<TypeId, TypeId, RefinementId> ConstraintGraphBuilder::checkBinary(
TypeId ty = follow(typeFun->type);
// We're only interested in the root class of any classes.
if (auto ctv = get<ClassType>(ty); !ctv || (FFlag::LuauNegatedClassTypes ? (ctv->parent == builtinTypes->classType) : !ctv->parent))
if (auto ctv = get<ClassType>(ty); !ctv || ctv->parent == builtinTypes->classType)
discriminantTy = ty;
}
@ -1786,8 +1787,10 @@ std::tuple<TypeId, TypeId, RefinementId> ConstraintGraphBuilder::checkBinary(
}
else if (binary->op == AstExprBinary::CompareEq || binary->op == AstExprBinary::CompareNe)
{
TypeId leftType = check(scope, binary->left, ValueContext::RValue, expectedType, true).ty;
TypeId rightType = check(scope, binary->right, ValueContext::RValue, expectedType, true).ty;
// We are checking a binary expression of the form a op b
// Just because a op b is expected to return a bool, doesn't mean a, b are expected to be bools too
TypeId leftType = check(scope, binary->left, ValueContext::RValue, {}, true).ty;
TypeId rightType = check(scope, binary->right, ValueContext::RValue, {}, true).ty;
RefinementId leftRefinement = nullptr;
if (auto bc = dfg->getBreadcrumb(binary->left))


@ -1172,6 +1172,9 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
else if (auto it = get<IntersectionType>(fn))
fn = collapse(it).value_or(fn);
if (c.callSite)
(*c.astOriginalCallTypes)[c.callSite] = fn;
// We don't support magic __call metamethods.
if (std::optional<TypeId> callMm = findMetatableEntry(builtinTypes, errors, fn, "__call", constraint->location))
{
@ -1219,10 +1222,22 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
TypeId inferredTy = arena->addType(FunctionType{TypeLevel{}, constraint->scope.get(), argsPack, c.result});
std::vector<TypeId> overloads = flattenIntersection(fn);
const NormalizedType* normFn = normalizer->normalize(fn);
if (!normFn)
{
reportError(UnificationTooComplex{}, constraint->location);
return true;
}
// TODO: It would be nice to not need to convert the normalized type back to
// an intersection and flatten it.
TypeId normFnTy = normalizer->typeFromNormal(*normFn);
std::vector<TypeId> overloads = flattenIntersection(normFnTy);
Instantiation inst(TxnLog::empty(), arena, TypeLevel{}, constraint->scope);
std::vector<TypeId> arityMatchingOverloads;
for (TypeId overload : overloads)
{
overload = follow(overload);
@ -1247,8 +1262,17 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
if (const auto& e = hasUnificationTooComplex(u.errors))
reportError(*e);
if (const auto& e = hasCountMismatch(u.errors);
(!e || get<CountMismatch>(*e)->context != CountMismatch::Context::Arg) && get<FunctionType>(*instantiated))
{
arityMatchingOverloads.push_back(*instantiated);
}
if (u.errors.empty())
{
if (c.callSite)
(*c.astOverloadResolvedTypes)[c.callSite] = *instantiated;
// We found a matching overload.
const auto [changedTypes, changedPacks] = u.log.getChanges();
u.log.commit();
@ -1260,6 +1284,15 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
}
}
if (arityMatchingOverloads.size() == 1 && c.callSite)
{
// In the name of better error messages in the type checker, we provide
// it with an instantiated function signature that matched arity, but
// not the requisite subtyping requirements. This makes errors better in
// cases where only one overload fit from an arity perspective.
(*c.astOverloadResolvedTypes)[c.callSite] = arityMatchingOverloads.at(0);
}
// We found no matching overloads.
Unifier u{normalizer, Mode::Strict, constraint->scope, Location{}, Covariant};
u.useScopes = true;
@ -1267,8 +1300,6 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
u.tryUnify(inferredTy, builtinTypes->anyType);
u.tryUnify(fn, builtinTypes->anyType);
LUAU_ASSERT(u.errors.empty()); // unifying with any should never fail
const auto [changedTypes, changedPacks] = u.log.getChanges();
u.log.commit();
@ -2166,13 +2197,24 @@ void ConstraintSolver::unblock(NotNull<const Constraint> progressed)
void ConstraintSolver::unblock(TypeId progressed)
{
if (logger)
logger->popBlock(progressed);
DenseHashSet<TypeId> seen{nullptr};
unblock_(progressed);
while (true)
{
if (seen.find(progressed))
iceReporter.ice("ConstraintSolver::unblock encountered a self-bound type!");
seen.insert(progressed);
if (auto bt = get<BoundType>(progressed))
unblock(bt->boundTo);
if (logger)
logger->popBlock(progressed);
unblock_(progressed);
if (auto bt = get<BoundType>(progressed))
progressed = bt->boundTo;
else
break;
}
}
void ConstraintSolver::unblock(TypePackId progressed)


@ -21,6 +21,9 @@
#include <algorithm>
#include <chrono>
#include <condition_variable>
#include <exception>
#include <mutex>
#include <stdexcept>
#include <string>
@ -34,10 +37,36 @@ LUAU_FASTFLAGVARIABLE(DebugLuauDeferredConstraintResolution, false)
LUAU_FASTFLAGVARIABLE(DebugLuauLogSolverToJson, false)
LUAU_FASTFLAG(LuauRequirePathTrueModuleName)
LUAU_FASTFLAGVARIABLE(DebugLuauReadWriteProperties, false)
LUAU_FASTFLAGVARIABLE(LuauSplitFrontendProcessing, false)
namespace Luau
{
struct BuildQueueItem
{
ModuleName name;
ModuleName humanReadableName;
// Parameters
std::shared_ptr<SourceNode> sourceNode;
std::shared_ptr<SourceModule> sourceModule;
Config config;
ScopePtr environmentScope;
std::vector<RequireCycle> requireCycles;
FrontendOptions options;
bool recordJsonLog = false;
// Queue state
std::vector<size_t> reverseDeps;
int dirtyDependencies = 0;
bool processing = false;
// Result
std::exception_ptr exception;
ModulePtr module;
Frontend::Stats stats;
};
std::optional<Mode> parseMode(const std::vector<HotComment>& hotcomments)
{
for (const HotComment& hc : hotcomments)
@ -220,7 +249,7 @@ namespace
{
static ErrorVec accumulateErrors(
const std::unordered_map<ModuleName, SourceNode>& sourceNodes, ModuleResolver& moduleResolver, const ModuleName& name)
const std::unordered_map<ModuleName, std::shared_ptr<SourceNode>>& sourceNodes, ModuleResolver& moduleResolver, const ModuleName& name)
{
std::unordered_set<ModuleName> seen;
std::vector<ModuleName> queue{name};
@ -240,7 +269,7 @@ static ErrorVec accumulateErrors(
if (it == sourceNodes.end())
continue;
const SourceNode& sourceNode = it->second;
const SourceNode& sourceNode = *it->second;
queue.insert(queue.end(), sourceNode.requireSet.begin(), sourceNode.requireSet.end());
// FIXME: If a module has a syntax error, we won't be able to re-report it here.
@ -285,8 +314,8 @@ static void filterLintOptions(LintOptions& lintOptions, const std::vector<HotCom
// For each such path, record the full path and the location of the require in the starting module.
// Note that this is O(V^2) for a fully connected graph and produces O(V) paths of length O(V)
// However, when the graph is acyclic, this is O(V), as well as when only the first cycle is needed (stopAtFirst=true)
std::vector<RequireCycle> getRequireCycles(
const FileResolver* resolver, const std::unordered_map<ModuleName, SourceNode>& sourceNodes, const SourceNode* start, bool stopAtFirst = false)
std::vector<RequireCycle> getRequireCycles(const FileResolver* resolver,
const std::unordered_map<ModuleName, std::shared_ptr<SourceNode>>& sourceNodes, const SourceNode* start, bool stopAtFirst = false)
{
std::vector<RequireCycle> result;
@ -302,7 +331,7 @@ std::vector<RequireCycle> getRequireCycles(
if (dit == sourceNodes.end())
continue;
stack.push_back(&dit->second);
stack.push_back(dit->second.get());
while (!stack.empty())
{
@ -343,7 +372,7 @@ std::vector<RequireCycle> getRequireCycles(
auto rit = sourceNodes.find(reqName);
if (rit != sourceNodes.end())
stack.push_back(&rit->second);
stack.push_back(rit->second.get());
}
}
}
@ -389,6 +418,52 @@ Frontend::Frontend(FileResolver* fileResolver, ConfigResolver* configResolver, c
}
CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOptions> optionOverride)
{
if (!FFlag::LuauSplitFrontendProcessing)
return check_DEPRECATED(name, optionOverride);
LUAU_TIMETRACE_SCOPE("Frontend::check", "Frontend");
LUAU_TIMETRACE_ARGUMENT("name", name.c_str());
FrontendOptions frontendOptions = optionOverride.value_or(options);
if (std::optional<CheckResult> result = getCheckResult(name, true, frontendOptions.forAutocomplete))
return std::move(*result);
std::vector<ModuleName> buildQueue;
bool cycleDetected = parseGraph(buildQueue, name, frontendOptions.forAutocomplete);
std::unordered_set<Luau::ModuleName> seen;
std::vector<BuildQueueItem> buildQueueItems;
addBuildQueueItems(buildQueueItems, buildQueue, cycleDetected, seen, frontendOptions);
LUAU_ASSERT(!buildQueueItems.empty());
if (FFlag::DebugLuauLogSolverToJson)
{
LUAU_ASSERT(buildQueueItems.back().name == name);
buildQueueItems.back().recordJsonLog = true;
}
checkBuildQueueItems(buildQueueItems);
// Collect results only for checked modules, 'getCheckResult' produces a different result
CheckResult checkResult;
for (const BuildQueueItem& item : buildQueueItems)
{
if (item.module->timeout)
checkResult.timeoutHits.push_back(item.name);
checkResult.errors.insert(checkResult.errors.end(), item.module->errors.begin(), item.module->errors.end());
if (item.name == name)
checkResult.lintResult = item.module->lintResult;
}
return checkResult;
}
CheckResult Frontend::check_DEPRECATED(const ModuleName& name, std::optional<FrontendOptions> optionOverride)
{
LUAU_TIMETRACE_SCOPE("Frontend::check", "Frontend");
LUAU_TIMETRACE_ARGUMENT("name", name.c_str());
@ -399,7 +474,7 @@ CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOption
FrontendModuleResolver& resolver = frontendOptions.forAutocomplete ? moduleResolverForAutocomplete : moduleResolver;
auto it = sourceNodes.find(name);
if (it != sourceNodes.end() && !it->second.hasDirtyModule(frontendOptions.forAutocomplete))
if (it != sourceNodes.end() && !it->second->hasDirtyModule(frontendOptions.forAutocomplete))
{
// No recheck required.
ModulePtr module = resolver.getModule(name);
@ -421,13 +496,13 @@ CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOption
for (const ModuleName& moduleName : buildQueue)
{
LUAU_ASSERT(sourceNodes.count(moduleName));
SourceNode& sourceNode = sourceNodes[moduleName];
SourceNode& sourceNode = *sourceNodes[moduleName];
if (!sourceNode.hasDirtyModule(frontendOptions.forAutocomplete))
continue;
LUAU_ASSERT(sourceModules.count(moduleName));
SourceModule& sourceModule = sourceModules[moduleName];
SourceModule& sourceModule = *sourceModules[moduleName];
const Config& config = configResolver->getConfig(moduleName);
@ -583,7 +658,241 @@ CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOption
return checkResult;
}
bool Frontend::parseGraph(std::vector<ModuleName>& buildQueue, const ModuleName& root, bool forAutocomplete)
void Frontend::queueModuleCheck(const std::vector<ModuleName>& names)
{
moduleQueue.insert(moduleQueue.end(), names.begin(), names.end());
}
void Frontend::queueModuleCheck(const ModuleName& name)
{
moduleQueue.push_back(name);
}
std::vector<ModuleName> Frontend::checkQueuedModules(std::optional<FrontendOptions> optionOverride,
std::function<void(std::function<void()> task)> executeTask, std::function<void(size_t done, size_t total)> progress)
{
FrontendOptions frontendOptions = optionOverride.value_or(options);
// By taking data into locals, we make sure queue is cleared at the end, even if an ICE or a different exception is thrown
std::vector<ModuleName> currModuleQueue;
std::swap(currModuleQueue, moduleQueue);
std::unordered_set<Luau::ModuleName> seen;
std::vector<BuildQueueItem> buildQueueItems;
for (const ModuleName& name : currModuleQueue)
{
if (seen.count(name))
continue;
if (!isDirty(name, frontendOptions.forAutocomplete))
{
seen.insert(name);
continue;
}
std::vector<ModuleName> queue;
bool cycleDetected = parseGraph(queue, name, frontendOptions.forAutocomplete, [&seen](const ModuleName& name) {
return seen.count(name);
});
addBuildQueueItems(buildQueueItems, queue, cycleDetected, seen, frontendOptions);
}
if (buildQueueItems.empty())
return {};
// We need a mapping from modules to build queue slots
std::unordered_map<ModuleName, size_t> moduleNameToQueue;
for (size_t i = 0; i < buildQueueItems.size(); i++)
{
BuildQueueItem& item = buildQueueItems[i];
moduleNameToQueue[item.name] = i;
}
// Default task execution is single-threaded and immediate
if (!executeTask)
{
executeTask = [](std::function<void()> task) {
task();
};
}
std::mutex mtx;
std::condition_variable cv;
std::vector<size_t> readyQueueItems;
size_t processing = 0;
size_t remaining = buildQueueItems.size();
auto itemTask = [&](size_t i) {
BuildQueueItem& item = buildQueueItems[i];
try
{
checkBuildQueueItem(item);
}
catch (...)
{
item.exception = std::current_exception();
}
{
std::unique_lock guard(mtx);
readyQueueItems.push_back(i);
}
cv.notify_one();
};
auto sendItemTask = [&](size_t i) {
BuildQueueItem& item = buildQueueItems[i];
item.processing = true;
processing++;
executeTask([&itemTask, i]() {
itemTask(i);
});
};
auto sendCycleItemTask = [&] {
for (size_t i = 0; i < buildQueueItems.size(); i++)
{
BuildQueueItem& item = buildQueueItems[i];
if (!item.processing)
{
sendItemTask(i);
break;
}
}
};
// In a first pass, check modules that have no dependencies and record info of those modules that wait
for (size_t i = 0; i < buildQueueItems.size(); i++)
{
BuildQueueItem& item = buildQueueItems[i];
for (const ModuleName& dep : item.sourceNode->requireSet)
{
if (auto it = sourceNodes.find(dep); it != sourceNodes.end())
{
if (it->second->hasDirtyModule(frontendOptions.forAutocomplete))
{
item.dirtyDependencies++;
buildQueueItems[moduleNameToQueue[dep]].reverseDeps.push_back(i);
}
}
}
if (item.dirtyDependencies == 0)
sendItemTask(i);
}
// Not a single item was found, a cycle in the graph was hit
if (processing == 0)
sendCycleItemTask();
std::vector<size_t> nextItems;
while (remaining != 0)
{
{
std::unique_lock guard(mtx);
// If nothing is ready yet, wait
if (readyQueueItems.empty())
{
cv.wait(guard, [&readyQueueItems] {
return !readyQueueItems.empty();
});
}
// Handle checked items
for (size_t i : readyQueueItems)
{
const BuildQueueItem& item = buildQueueItems[i];
recordItemResult(item);
// Notify items that were waiting for this dependency
for (size_t reverseDep : item.reverseDeps)
{
BuildQueueItem& reverseDepItem = buildQueueItems[reverseDep];
LUAU_ASSERT(reverseDepItem.dirtyDependencies != 0);
reverseDepItem.dirtyDependencies--;
// In case of a module cycle earlier, check if unlocked an item that was already processed
if (!reverseDepItem.processing && reverseDepItem.dirtyDependencies == 0)
nextItems.push_back(reverseDep);
}
}
LUAU_ASSERT(processing >= readyQueueItems.size());
processing -= readyQueueItems.size();
LUAU_ASSERT(remaining >= readyQueueItems.size());
remaining -= readyQueueItems.size();
readyQueueItems.clear();
}
if (progress)
progress(buildQueueItems.size() - remaining, buildQueueItems.size());
// Items cannot be submitted while holding the lock
for (size_t i : nextItems)
sendItemTask(i);
nextItems.clear();
// If we aren't done, but don't have anything processing, we hit a cycle
if (remaining != 0 && processing == 0)
sendCycleItemTask();
}
std::vector<ModuleName> checkedModules;
checkedModules.reserve(buildQueueItems.size());
for (size_t i = 0; i < buildQueueItems.size(); i++)
checkedModules.push_back(std::move(buildQueueItems[i].name));
return checkedModules;
}
std::optional<CheckResult> Frontend::getCheckResult(const ModuleName& name, bool accumulateNested, bool forAutocomplete)
{
auto it = sourceNodes.find(name);
if (it == sourceNodes.end() || it->second->hasDirtyModule(forAutocomplete))
return std::nullopt;
auto& resolver = forAutocomplete ? moduleResolverForAutocomplete : moduleResolver;
ModulePtr module = resolver.getModule(name);
if (module == nullptr)
throw InternalCompilerError("Frontend does not have module: " + name, name);
CheckResult checkResult;
if (module->timeout)
checkResult.timeoutHits.push_back(name);
if (accumulateNested)
checkResult.errors = accumulateErrors(sourceNodes, resolver, name);
else
checkResult.errors.insert(checkResult.errors.end(), module->errors.begin(), module->errors.end());
// Get lint result only for top checked module
checkResult.lintResult = module->lintResult;
return checkResult;
}
bool Frontend::parseGraph(
std::vector<ModuleName>& buildQueue, const ModuleName& root, bool forAutocomplete, std::function<bool(const ModuleName&)> canSkip)
{
LUAU_TIMETRACE_SCOPE("Frontend::parseGraph", "Frontend");
LUAU_TIMETRACE_ARGUMENT("root", root.c_str());
@ -654,14 +963,18 @@ bool Frontend::parseGraph(std::vector<ModuleName>& buildQueue, const ModuleName&
// this relies on the fact that markDirty marks reverse-dependencies dirty as well
// thus if a node is not dirty, all its transitive deps aren't dirty, which means that they won't ever need
// to be built, *and* can't form a cycle with any nodes we did process.
if (!it->second.hasDirtyModule(forAutocomplete))
if (!it->second->hasDirtyModule(forAutocomplete))
continue;
// This module might already be in the outside build queue
if (canSkip && canSkip(dep))
continue;
// note: this check is technically redundant *except* that getSourceNode has somewhat broken memoization
// calling getSourceNode twice in succession will reparse the file, since getSourceNode leaves dirty flag set
if (seen.contains(&it->second))
if (seen.contains(it->second.get()))
{
stack.push_back(&it->second);
stack.push_back(it->second.get());
continue;
}
}
@ -681,6 +994,210 @@ bool Frontend::parseGraph(std::vector<ModuleName>& buildQueue, const ModuleName&
return cyclic;
}
void Frontend::addBuildQueueItems(std::vector<BuildQueueItem>& items, std::vector<ModuleName>& buildQueue, bool cycleDetected,
std::unordered_set<Luau::ModuleName>& seen, const FrontendOptions& frontendOptions)
{
LUAU_ASSERT(FFlag::LuauSplitFrontendProcessing);
for (const ModuleName& moduleName : buildQueue)
{
if (seen.count(moduleName))
continue;
seen.insert(moduleName);
LUAU_ASSERT(sourceNodes.count(moduleName));
std::shared_ptr<SourceNode>& sourceNode = sourceNodes[moduleName];
if (!sourceNode->hasDirtyModule(frontendOptions.forAutocomplete))
continue;
LUAU_ASSERT(sourceModules.count(moduleName));
std::shared_ptr<SourceModule>& sourceModule = sourceModules[moduleName];
BuildQueueItem data{moduleName, fileResolver->getHumanReadableModuleName(moduleName), sourceNode, sourceModule};
data.config = configResolver->getConfig(moduleName);
data.environmentScope = getModuleEnvironment(*sourceModule, data.config, frontendOptions.forAutocomplete);
Mode mode = sourceModule->mode.value_or(data.config.mode);
// in NoCheck mode we only need to compute the value of .cyclic for typeck
// in the future we could replace toposort with an algorithm that can flag cyclic nodes by itself
// however, for now getRequireCycles isn't expensive in practice on the cases we care about, and long term
// all correct programs must be acyclic so this code triggers rarely
if (cycleDetected)
data.requireCycles = getRequireCycles(fileResolver, sourceNodes, sourceNode.get(), mode == Mode::NoCheck);
data.options = frontendOptions;
// This is used by the type checker to replace the resulting type of cyclic modules with any
sourceModule->cyclic = !data.requireCycles.empty();
items.push_back(std::move(data));
}
}
void Frontend::checkBuildQueueItem(BuildQueueItem& item)
{
LUAU_ASSERT(FFlag::LuauSplitFrontendProcessing);
SourceNode& sourceNode = *item.sourceNode;
const SourceModule& sourceModule = *item.sourceModule;
const Config& config = item.config;
Mode mode = sourceModule.mode.value_or(config.mode);
ScopePtr environmentScope = item.environmentScope;
double timestamp = getTimestamp();
const std::vector<RequireCycle>& requireCycles = item.requireCycles;
if (item.options.forAutocomplete)
{
double autocompleteTimeLimit = FInt::LuauAutocompleteCheckTimeoutMs / 1000.0;
// The autocomplete typecheck is always in strict mode with DM awareness
// to provide better type information for IDE features
TypeCheckLimits typeCheckLimits;
if (autocompleteTimeLimit != 0.0)
typeCheckLimits.finishTime = TimeTrace::getClock() + autocompleteTimeLimit;
else
typeCheckLimits.finishTime = std::nullopt;
// TODO: This is a dirty ad hoc solution for autocomplete timeouts
// We are trying to dynamically adjust our existing limits to lower total typechecking time under the limit
// so that we'll have type information for the whole file at lower quality instead of a full abort in the middle
if (FInt::LuauTarjanChildLimit > 0)
typeCheckLimits.instantiationChildLimit = std::max(1, int(FInt::LuauTarjanChildLimit * sourceNode.autocompleteLimitsMult));
else
typeCheckLimits.instantiationChildLimit = std::nullopt;
if (FInt::LuauTypeInferIterationLimit > 0)
typeCheckLimits.unifierIterationLimit = std::max(1, int(FInt::LuauTypeInferIterationLimit * sourceNode.autocompleteLimitsMult));
else
typeCheckLimits.unifierIterationLimit = std::nullopt;
ModulePtr moduleForAutocomplete = check(sourceModule, Mode::Strict, requireCycles, environmentScope, /*forAutocomplete*/ true,
/*recordJsonLog*/ false, typeCheckLimits);
double duration = getTimestamp() - timestamp;
if (moduleForAutocomplete->timeout)
sourceNode.autocompleteLimitsMult = sourceNode.autocompleteLimitsMult / 2.0;
else if (duration < autocompleteTimeLimit / 2.0)
sourceNode.autocompleteLimitsMult = std::min(sourceNode.autocompleteLimitsMult * 2.0, 1.0);
item.stats.timeCheck += duration;
item.stats.filesStrict += 1;
item.module = moduleForAutocomplete;
return;
}
ModulePtr module = check(sourceModule, mode, requireCycles, environmentScope, /*forAutocomplete*/ false, item.recordJsonLog, {});
item.stats.timeCheck += getTimestamp() - timestamp;
item.stats.filesStrict += mode == Mode::Strict;
item.stats.filesNonstrict += mode == Mode::Nonstrict;
if (module == nullptr)
throw InternalCompilerError("Frontend::check produced a nullptr module for " + item.name, item.name);
if (FFlag::DebugLuauDeferredConstraintResolution && mode == Mode::NoCheck)
module->errors.clear();
if (item.options.runLintChecks)
{
LUAU_TIMETRACE_SCOPE("lint", "Frontend");
LintOptions lintOptions = item.options.enabledLintWarnings.value_or(config.enabledLint);
filterLintOptions(lintOptions, sourceModule.hotcomments, mode);
double timestamp = getTimestamp();
std::vector<LintWarning> warnings =
Luau::lint(sourceModule.root, *sourceModule.names, environmentScope, module.get(), sourceModule.hotcomments, lintOptions);
item.stats.timeLint += getTimestamp() - timestamp;
module->lintResult = classifyLints(warnings, config);
}
if (!item.options.retainFullTypeGraphs)
{
// copyErrors needs to allocate into interfaceTypes as it copies
// types out of internalTypes, so we unfreeze it here.
unfreeze(module->interfaceTypes);
copyErrors(module->errors, module->interfaceTypes);
freeze(module->interfaceTypes);
module->internalTypes.clear();
module->astTypes.clear();
module->astTypePacks.clear();
module->astExpectedTypes.clear();
module->astOriginalCallTypes.clear();
module->astOverloadResolvedTypes.clear();
module->astResolvedTypes.clear();
module->astOriginalResolvedTypes.clear();
module->astResolvedTypePacks.clear();
module->astScopes.clear();
module->scopes.clear();
}
if (mode != Mode::NoCheck)
{
for (const RequireCycle& cyc : requireCycles)
{
TypeError te{cyc.location, item.name, ModuleHasCyclicDependency{cyc.path}};
module->errors.push_back(te);
}
}
ErrorVec parseErrors;
for (const ParseError& pe : sourceModule.parseErrors)
parseErrors.push_back(TypeError{pe.getLocation(), item.name, SyntaxError{pe.what()}});
module->errors.insert(module->errors.begin(), parseErrors.begin(), parseErrors.end());
item.module = module;
}
void Frontend::checkBuildQueueItems(std::vector<BuildQueueItem>& items)
{
LUAU_ASSERT(FFlag::LuauSplitFrontendProcessing);
for (BuildQueueItem& item : items)
{
checkBuildQueueItem(item);
recordItemResult(item);
}
}
void Frontend::recordItemResult(const BuildQueueItem& item)
{
if (item.exception)
std::rethrow_exception(item.exception);
if (item.options.forAutocomplete)
{
moduleResolverForAutocomplete.setModule(item.name, item.module);
item.sourceNode->dirtyModuleForAutocomplete = false;
}
else
{
moduleResolver.setModule(item.name, item.module);
item.sourceNode->dirtyModule = false;
}
stats.timeCheck += item.stats.timeCheck;
stats.timeLint += item.stats.timeLint;
stats.filesStrict += item.stats.filesStrict;
stats.filesNonstrict += item.stats.filesNonstrict;
}
ScopePtr Frontend::getModuleEnvironment(const SourceModule& module, const Config& config, bool forAutocomplete) const
{
ScopePtr result;
@ -711,7 +1228,7 @@ ScopePtr Frontend::getModuleEnvironment(const SourceModule& module, const Config
bool Frontend::isDirty(const ModuleName& name, bool forAutocomplete) const
{
auto it = sourceNodes.find(name);
return it == sourceNodes.end() || it->second.hasDirtyModule(forAutocomplete);
return it == sourceNodes.end() || it->second->hasDirtyModule(forAutocomplete);
}
/*
@ -728,7 +1245,7 @@ void Frontend::markDirty(const ModuleName& name, std::vector<ModuleName>* marked
std::unordered_map<ModuleName, std::vector<ModuleName>> reverseDeps;
for (const auto& module : sourceNodes)
{
for (const auto& dep : module.second.requireSet)
for (const auto& dep : module.second->requireSet)
reverseDeps[dep].push_back(module.first);
}
@ -740,7 +1257,7 @@ void Frontend::markDirty(const ModuleName& name, std::vector<ModuleName>* marked
queue.pop_back();
LUAU_ASSERT(sourceNodes.count(next) > 0);
SourceNode& sourceNode = sourceNodes[next];
SourceNode& sourceNode = *sourceNodes[next];
if (markedDirty)
markedDirty->push_back(next);
@ -766,7 +1283,7 @@ SourceModule* Frontend::getSourceModule(const ModuleName& moduleName)
{
auto it = sourceModules.find(moduleName);
if (it != sourceModules.end())
return &it->second;
return it->second.get();
else
return nullptr;
}
@ -901,22 +1418,22 @@ ModulePtr Frontend::check(const SourceModule& sourceModule, Mode mode, std::vect
// Read AST into sourceModules if necessary. Trace require()s. Report parse errors.
std::pair<SourceNode*, SourceModule*> Frontend::getSourceNode(const ModuleName& name)
{
LUAU_TIMETRACE_SCOPE("Frontend::getSourceNode", "Frontend");
LUAU_TIMETRACE_ARGUMENT("name", name.c_str());
auto it = sourceNodes.find(name);
if (it != sourceNodes.end() && !it->second.hasDirtySourceModule())
if (it != sourceNodes.end() && !it->second->hasDirtySourceModule())
{
auto moduleIt = sourceModules.find(name);
if (moduleIt != sourceModules.end())
return {&it->second, &moduleIt->second};
return {it->second.get(), moduleIt->second.get()};
else
{
LUAU_ASSERT(!"Everything in sourceNodes should also be in sourceModules");
return {&it->second, nullptr};
return {it->second.get(), nullptr};
}
}
LUAU_TIMETRACE_SCOPE("Frontend::getSourceNode", "Frontend");
LUAU_TIMETRACE_ARGUMENT("name", name.c_str());
double timestamp = getTimestamp();
std::optional<SourceCode> source = fileResolver->readSource(name);
@ -939,30 +1456,37 @@ std::pair<SourceNode*, SourceModule*> Frontend::getSourceNode(const ModuleName&
RequireTraceResult& require = requireTrace[name];
require = traceRequires(fileResolver, result.root, name);
SourceNode& sourceNode = sourceNodes[name];
SourceModule& sourceModule = sourceModules[name];
std::shared_ptr<SourceNode>& sourceNode = sourceNodes[name];
sourceModule = std::move(result);
sourceModule.environmentName = environmentName;
if (!sourceNode)
sourceNode = std::make_shared<SourceNode>();
sourceNode.name = sourceModule.name;
sourceNode.humanReadableName = sourceModule.humanReadableName;
sourceNode.requireSet.clear();
sourceNode.requireLocations.clear();
sourceNode.dirtySourceModule = false;
std::shared_ptr<SourceModule>& sourceModule = sourceModules[name];
if (!sourceModule)
sourceModule = std::make_shared<SourceModule>();
*sourceModule = std::move(result);
sourceModule->environmentName = environmentName;
sourceNode->name = sourceModule->name;
sourceNode->humanReadableName = sourceModule->humanReadableName;
sourceNode->requireSet.clear();
sourceNode->requireLocations.clear();
sourceNode->dirtySourceModule = false;
if (it == sourceNodes.end())
{
sourceNode.dirtyModule = true;
sourceNode.dirtyModuleForAutocomplete = true;
sourceNode->dirtyModule = true;
sourceNode->dirtyModuleForAutocomplete = true;
}
for (const auto& [moduleName, location] : require.requireList)
sourceNode.requireSet.insert(moduleName);
sourceNode->requireSet.insert(moduleName);
sourceNode.requireLocations = require.requireList;
sourceNode->requireLocations = require.requireList;
return {&sourceNode, &sourceModule};
return {sourceNode.get(), sourceModule.get()};
}
/** Try to parse a source file into a SourceModule.


@ -17,8 +17,6 @@ LUAU_FASTFLAGVARIABLE(DebugLuauCheckNormalizeInvariant, false)
// This could theoretically be 2000 on amd64, but x86 requires this.
LUAU_FASTINTVARIABLE(LuauNormalizeIterationLimit, 1200);
LUAU_FASTINTVARIABLE(LuauNormalizeCacheLimit, 100000);
LUAU_FASTFLAGVARIABLE(LuauNegatedClassTypes, false);
LUAU_FASTFLAGVARIABLE(LuauNegatedTableTypes, false);
LUAU_FASTFLAGVARIABLE(LuauNormalizeBlockedTypes, false);
LUAU_FASTFLAGVARIABLE(LuauNormalizeMetatableFixes, false);
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
@ -232,15 +230,8 @@ NormalizedType::NormalizedType(NotNull<BuiltinTypes> builtinTypes)
static bool isShallowInhabited(const NormalizedType& norm)
{
bool inhabitedClasses;
if (FFlag::LuauNegatedClassTypes)
inhabitedClasses = !norm.classes.isNever();
else
inhabitedClasses = !norm.DEPRECATED_classes.empty();
// This test is just a shallow check, for example it returns `true` for `{ p : never }`
return !get<NeverType>(norm.tops) || !get<NeverType>(norm.booleans) || inhabitedClasses || !get<NeverType>(norm.errors) ||
return !get<NeverType>(norm.tops) || !get<NeverType>(norm.booleans) || !norm.classes.isNever() || !get<NeverType>(norm.errors) ||
!get<NeverType>(norm.nils) || !get<NeverType>(norm.numbers) || !norm.strings.isNever() || !get<NeverType>(norm.threads) ||
!norm.functions.isNever() || !norm.tables.empty() || !norm.tyvars.empty();
}
@ -257,14 +248,8 @@ bool Normalizer::isInhabited(const NormalizedType* norm, std::unordered_set<Type
if (!norm)
return true;
bool inhabitedClasses;
if (FFlag::LuauNegatedClassTypes)
inhabitedClasses = !norm->classes.isNever();
else
inhabitedClasses = !norm->DEPRECATED_classes.empty();
if (!get<NeverType>(norm->tops) || !get<NeverType>(norm->booleans) || !get<NeverType>(norm->errors) || !get<NeverType>(norm->nils) ||
!get<NeverType>(norm->numbers) || !get<NeverType>(norm->threads) || inhabitedClasses || !norm->strings.isNever() ||
!get<NeverType>(norm->numbers) || !get<NeverType>(norm->threads) || !norm->classes.isNever() || !norm->strings.isNever() ||
!norm->functions.isNever())
return true;
@ -466,7 +451,7 @@ static bool areNormalizedTables(const TypeIds& tys)
if (!pt)
return false;
if (pt->type == PrimitiveType::Table && FFlag::LuauNegatedTableTypes)
if (pt->type == PrimitiveType::Table)
continue;
return false;
@ -475,14 +460,6 @@ static bool areNormalizedTables(const TypeIds& tys)
return true;
}
static bool areNormalizedClasses(const TypeIds& tys)
{
for (TypeId ty : tys)
if (!get<ClassType>(ty))
return false;
return true;
}
static bool areNormalizedClasses(const NormalizedClassType& tys)
{
for (const auto& [ty, negations] : tys.classes)
@ -567,7 +544,6 @@ static void assertInvariant(const NormalizedType& norm)
LUAU_ASSERT(isNormalizedTop(norm.tops));
LUAU_ASSERT(isNormalizedBoolean(norm.booleans));
LUAU_ASSERT(areNormalizedClasses(norm.DEPRECATED_classes));
LUAU_ASSERT(areNormalizedClasses(norm.classes));
LUAU_ASSERT(isNormalizedError(norm.errors));
LUAU_ASSERT(isNormalizedNil(norm.nils));
@ -629,7 +605,6 @@ void Normalizer::clearNormal(NormalizedType& norm)
norm.tops = builtinTypes->neverType;
norm.booleans = builtinTypes->neverType;
norm.classes.resetToNever();
norm.DEPRECATED_classes.clear();
norm.errors = builtinTypes->neverType;
norm.nils = builtinTypes->neverType;
norm.numbers = builtinTypes->neverType;
@ -1253,18 +1228,11 @@ void Normalizer::unionTables(TypeIds& heres, const TypeIds& theres)
{
for (TypeId there : theres)
{
if (FFlag::LuauNegatedTableTypes)
if (there == builtinTypes->tableType)
{
if (there == builtinTypes->tableType)
{
heres.clear();
heres.insert(there);
return;
}
else
{
unionTablesWithTable(heres, there);
}
heres.clear();
heres.insert(there);
return;
}
else
{
@ -1320,10 +1288,7 @@ bool Normalizer::unionNormals(NormalizedType& here, const NormalizedType& there,
}
here.booleans = unionOfBools(here.booleans, there.booleans);
if (FFlag::LuauNegatedClassTypes)
unionClasses(here.classes, there.classes);
else
unionClasses(here.DEPRECATED_classes, there.DEPRECATED_classes);
unionClasses(here.classes, there.classes);
here.errors = (get<NeverType>(there.errors) ? here.errors : there.errors);
here.nils = (get<NeverType>(there.nils) ? here.nils : there.nils);
@ -1414,16 +1379,7 @@ bool Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, int ignor
else if (get<TableType>(there) || get<MetatableType>(there))
unionTablesWithTable(here.tables, there);
else if (get<ClassType>(there))
{
if (FFlag::LuauNegatedClassTypes)
{
unionClassesWithClass(here.classes, there);
}
else
{
unionClassesWithClass(here.DEPRECATED_classes, there);
}
}
unionClassesWithClass(here.classes, there);
else if (get<ErrorType>(there))
here.errors = there;
else if (const PrimitiveType* ptv = get<PrimitiveType>(there))
@ -1442,7 +1398,7 @@ bool Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, int ignor
{
here.functions.resetToTop();
}
else if (ptv->type == PrimitiveType::Table && FFlag::LuauNegatedTableTypes)
else if (ptv->type == PrimitiveType::Table)
{
here.tables.clear();
here.tables.insert(there);
@ -1527,36 +1483,29 @@ std::optional<NormalizedType> Normalizer::negateNormal(const NormalizedType& her
result.booleans = builtinTypes->trueType;
}
if (FFlag::LuauNegatedClassTypes)
if (here.classes.isNever())
{
if (here.classes.isNever())
{
resetToTop(builtinTypes, result.classes);
}
else if (isTop(builtinTypes, result.classes))
{
result.classes.resetToNever();
}
else
{
TypeIds rootNegations{};
for (const auto& [hereParent, hereNegations] : here.classes.classes)
{
if (hereParent != builtinTypes->classType)
rootNegations.insert(hereParent);
for (TypeId hereNegation : hereNegations)
unionClassesWithClass(result.classes, hereNegation);
}
if (!rootNegations.empty())
result.classes.pushPair(builtinTypes->classType, rootNegations);
}
resetToTop(builtinTypes, result.classes);
}
else if (isTop(builtinTypes, result.classes))
{
result.classes.resetToNever();
}
else
{
result.DEPRECATED_classes = negateAll(here.DEPRECATED_classes);
TypeIds rootNegations{};
for (const auto& [hereParent, hereNegations] : here.classes.classes)
{
if (hereParent != builtinTypes->classType)
rootNegations.insert(hereParent);
for (TypeId hereNegation : hereNegations)
unionClassesWithClass(result.classes, hereNegation);
}
if (!rootNegations.empty())
result.classes.pushPair(builtinTypes->classType, rootNegations);
}
result.nils = get<NeverType>(here.nils) ? builtinTypes->nilType : builtinTypes->neverType;
@ -1584,15 +1533,12 @@ std::optional<NormalizedType> Normalizer::negateNormal(const NormalizedType& her
* types are not runtime-testable. Thus, we prohibit negation of anything
* other than `table` and `never`.
*/
if (FFlag::LuauNegatedTableTypes)
{
if (here.tables.empty())
result.tables.insert(builtinTypes->tableType);
else if (here.tables.size() == 1 && here.tables.front() == builtinTypes->tableType)
result.tables.clear();
else
return std::nullopt;
}
if (here.tables.empty())
result.tables.insert(builtinTypes->tableType);
else if (here.tables.size() == 1 && here.tables.front() == builtinTypes->tableType)
result.tables.clear();
else
return std::nullopt;
// TODO: negating tables
// TODO: negating tyvars?