Sync to upstream/release/540 (#635)

Also adjust benchmark runs to use config=profile, running clang for all benchmarks and gcc for the runtime benchmark
Arseny Kapoulkine 2022-08-11 14:01:33 -07:00 committed by GitHub
parent 2c40b7661c
commit f7d8ad0774
50 changed files with 2193 additions and 432 deletions


@ -13,12 +13,11 @@ on:
jobs:
callgrind:
name: callgrind ${{ matrix.compiler }}
name: callgrind
strategy:
fail-fast: false
matrix:
os: [ubuntu-22.04]
compiler: [g++]
benchResultsRepo:
- { name: "luau-lang/benchmark-data", branch: "main" }
@ -31,12 +30,23 @@ jobs:
run: |
sudo apt-get install valgrind
- name: Build Luau
run: CXX=${{ matrix.compiler }} make config=release CALLGRIND=1 luau luau-analyze
- name: Run benchmark (bench)
- name: Build Luau (gcc)
run: |
python bench/bench.py --callgrind --vm "./luau -O2" | tee -a bench-output.txt
CXX=g++ make config=profile luau
cp luau luau-gcc
- name: Build Luau (clang)
run: |
make config=profile clean
CXX=clang++ make config=profile luau luau-analyze
- name: Run benchmark (bench-gcc)
run: |
python bench/bench.py --callgrind --vm "./luau-gcc -O2" | tee -a bench-gcc-output.txt
- name: Run benchmark (bench-clang)
run: |
python bench/bench.py --callgrind --vm "./luau -O2" | tee -a bench-clang-output.txt
- name: Run benchmark (analyze)
run: |
@ -68,13 +78,21 @@ jobs:
token: ${{ secrets.BENCH_GITHUB_TOKEN }}
path: "./gh-pages"
- name: Store results (bench)
- name: Store results (bench-clang)
uses: Roblox/rhysd-github-action-benchmark@v-luau
with:
name: callgrind ${{ matrix.compiler }}
name: callgrind clang
tool: "benchmarkluau"
output-file-path: ./bench-output.txt
external-data-json-path: ./gh-pages/bench.json
output-file-path: ./bench-clang-output.txt
external-data-json-path: ./gh-pages/bench-clang.json
- name: Store results (bench-gcc)
uses: Roblox/rhysd-github-action-benchmark@v-luau
with:
name: callgrind gcc
tool: "benchmarkluau"
output-file-path: ./bench-gcc-output.txt
external-data-json-path: ./gh-pages/bench-gcc.json
- name: Store results (analyze)
uses: Roblox/rhysd-github-action-benchmark@v-luau


@ -25,6 +25,6 @@ TypePackId clone(TypePackId tp, TypeArena& dest, CloneState& cloneState);
TypeId clone(TypeId tp, TypeArena& dest, CloneState& cloneState);
TypeFun clone(const TypeFun& typeFun, TypeArena& dest, CloneState& cloneState);
TypeId shallowClone(TypeId ty, TypeArena& dest, const TxnLog* log);
TypeId shallowClone(TypeId ty, TypeArena& dest, const TxnLog* log, bool alwaysClone = false);
} // namespace Luau


@ -28,6 +28,7 @@ struct ConstraintGraphBuilder
std::vector<std::pair<Location, ScopePtr>> scopes;
ModuleName moduleName;
ModulePtr module;
SingletonTypes& singletonTypes;
const NotNull<TypeArena> arena;
// The root scope of the module we're generating constraints for.
@ -53,9 +54,9 @@ struct ConstraintGraphBuilder
// Occasionally constraint generation needs to produce an ICE.
const NotNull<InternalErrorReporter> ice;
NotNull<Scope> globalScope;
ScopePtr globalScope;
ConstraintGraphBuilder(const ModuleName& moduleName, TypeArena* arena, NotNull<InternalErrorReporter> ice, NotNull<Scope> globalScope);
ConstraintGraphBuilder(const ModuleName& moduleName, ModulePtr module, TypeArena* arena, NotNull<InternalErrorReporter> ice, const ScopePtr& globalScope);
/**
* Fabricates a new free type belonging to a given scope.
@ -103,6 +104,7 @@ struct ConstraintGraphBuilder
void visit(const ScopePtr& scope, AstStatBlock* block);
void visit(const ScopePtr& scope, AstStatLocal* local);
void visit(const ScopePtr& scope, AstStatFor* for_);
void visit(const ScopePtr& scope, AstStatWhile* while_);
void visit(const ScopePtr& scope, AstStatLocalFunction* function);
void visit(const ScopePtr& scope, AstStatFunction* function);
void visit(const ScopePtr& scope, AstStatReturn* ret);
@ -131,6 +133,7 @@ struct ConstraintGraphBuilder
TypeId check(const ScopePtr& scope, AstExprIndexExpr* indexExpr);
TypeId check(const ScopePtr& scope, AstExprUnary* unary);
TypeId check(const ScopePtr& scope, AstExprBinary* binary);
TypeId check(const ScopePtr& scope, AstExprTypeAssertion* typeAssert);
struct FunctionSignature
{


@ -154,7 +154,7 @@ struct Frontend
LoadDefinitionFileResult loadDefinitionFile(std::string_view source, const std::string& packageName);
NotNull<Scope> getGlobalScope();
ScopePtr getGlobalScope();
private:
ModulePtr check(const SourceModule& sourceModule, Mode mode, const ScopePtr& environmentScope);


@ -53,6 +53,7 @@ struct LintWarning
Code_MisleadingAndOr = 25,
Code_CommentDirective = 26,
Code_IntegerParsing = 27,
Code_ComparisonPrecedence = 28,
Code__Count
};


@ -36,8 +36,6 @@ struct Scope
// All the children of this scope.
std::vector<NotNull<Scope>> children;
std::unordered_map<Symbol, Binding> bindings;
std::unordered_map<Name, TypeFun> typeBindings;
std::unordered_map<Name, TypePackId> typePackBindings;
TypePackId returnType;
std::optional<TypePackId> varargPack;
// All constraints belonging to this scope.
@ -52,8 +50,6 @@ struct Scope
std::unordered_map<Name, std::unordered_map<Name, TypeFun>> importedTypeBindings;
std::optional<TypeId> lookup(Symbol sym);
std::optional<TypeFun> lookupTypeBinding(const Name& name);
std::optional<TypePackId> lookupTypePackBinding(const Name& name);
std::optional<TypeFun> lookupType(const Name& name);
std::optional<TypeFun> lookupImportedType(const Name& moduleAlias, const Name& name);


@ -16,6 +16,8 @@
#include <unordered_map>
#include <unordered_set>
LUAU_FASTFLAG(LuauClassTypeVarsInSubstitution)
namespace Luau
{
@ -57,6 +59,9 @@ struct Anyification : Substitution
bool ignoreChildren(TypeId ty) override
{
if (FFlag::LuauClassTypeVarsInSubstitution && get<ClassTypeVar>(ty))
return true;
return ty->persistent;
}
bool ignoreChildren(TypePackId ty) override


@ -717,6 +717,7 @@ struct TypeIterator
stack.push_front({t, 0});
seen.insert(t);
descend();
}
TypeIterator<T>& operator++()
@ -748,17 +749,19 @@ struct TypeIterator
const TypeId& operator*()
{
LUAU_ASSERT(!stack.empty());
descend();
LUAU_ASSERT(!stack.empty());
auto [t, currentIndex] = stack.front();
LUAU_ASSERT(t);
const std::vector<TypeId>& types = getTypes(t);
LUAU_ASSERT(currentIndex < types.size());
const TypeId& ty = types[currentIndex];
LUAU_ASSERT(!get<T>(follow(ty)));
return ty;
}


@ -109,11 +109,11 @@ private:
public:
void unifyLowerBound(TypePackId subTy, TypePackId superTy, TypeLevel demotedLevel);
// Report an "infinite type error" if the type "needle" already occurs within "haystack"
void occursCheck(TypeId needle, TypeId haystack);
void occursCheck(DenseHashSet<TypeId>& seen, TypeId needle, TypeId haystack);
void occursCheck(TypePackId needle, TypePackId haystack);
void occursCheck(DenseHashSet<TypePackId>& seen, TypePackId needle, TypePackId haystack);
// Returns true if the type "needle" already occurs within "haystack" and reports an "infinite type error"
bool occursCheck(TypeId needle, TypeId haystack);
bool occursCheck(DenseHashSet<TypeId>& seen, TypeId needle, TypeId haystack);
bool occursCheck(TypePackId needle, TypePackId haystack);
bool occursCheck(DenseHashSet<TypePackId>& seen, TypePackId needle, TypePackId haystack);
Unifier makeChildUnifier();


@ -2,6 +2,8 @@
#include "Luau/ApplyTypeFunction.h"
LUAU_FASTFLAG(LuauClassTypeVarsInSubstitution)
namespace Luau
{
@ -31,6 +33,8 @@ bool ApplyTypeFunction::ignoreChildren(TypeId ty)
{
if (get<GenericTypeVar>(ty))
return true;
else if (FFlag::LuauClassTypeVarsInSubstitution && get<ClassTypeVar>(ty))
return true;
else
return false;
}


@ -7,6 +7,7 @@
#include "Luau/Unifiable.h"
LUAU_FASTFLAG(DebugLuauCopyBeforeNormalizing)
LUAU_FASTFLAG(LuauClonePublicInterfaceLess)
LUAU_FASTINTVARIABLE(LuauTypeCloneRecursionLimit, 300)
@ -445,7 +446,7 @@ TypeFun clone(const TypeFun& typeFun, TypeArena& dest, CloneState& cloneState)
return result;
}
TypeId shallowClone(TypeId ty, TypeArena& dest, const TxnLog* log)
TypeId shallowClone(TypeId ty, TypeArena& dest, const TxnLog* log, bool alwaysClone)
{
ty = log->follow(ty);
@ -504,6 +505,15 @@ TypeId shallowClone(TypeId ty, TypeArena& dest, const TxnLog* log)
PendingExpansionTypeVar clone{petv->fn, petv->typeArguments, petv->packArguments};
result = dest.addType(std::move(clone));
}
else if (const ClassTypeVar* ctv = get<ClassTypeVar>(ty); FFlag::LuauClonePublicInterfaceLess && ctv && alwaysClone)
{
ClassTypeVar clone{ctv->name, ctv->props, ctv->parent, ctv->metatable, ctv->tags, ctv->userData, ctv->definitionModuleName};
result = dest.addType(std::move(clone));
}
else if (FFlag::LuauClonePublicInterfaceLess && alwaysClone)
{
result = dest.addType(*ty);
}
else
return result;


@ -1,6 +1,8 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/ConstraintGraphBuilder.h"
#include "Luau/Ast.h"
#include "Luau/Constraint.h"
#include "Luau/RecursionCounter.h"
#include "Luau/ToString.h"
@ -14,8 +16,9 @@ namespace Luau
const AstStat* getFallthrough(const AstStat* node); // TypeInfer.cpp
ConstraintGraphBuilder::ConstraintGraphBuilder(
const ModuleName& moduleName, TypeArena* arena, NotNull<InternalErrorReporter> ice, NotNull<Scope> globalScope)
const ModuleName& moduleName, ModulePtr module, TypeArena* arena, NotNull<InternalErrorReporter> ice, const ScopePtr& globalScope)
: moduleName(moduleName)
, module(module)
, singletonTypes(getSingletonTypes())
, arena(arena)
, rootScope(nullptr)
@ -61,7 +64,7 @@ void ConstraintGraphBuilder::visit(AstStatBlock* block)
{
LUAU_ASSERT(scopes.empty());
LUAU_ASSERT(rootScope == nullptr);
ScopePtr scope = std::make_shared<Scope>(singletonTypes.anyTypePack);
ScopePtr scope = std::make_shared<Scope>(globalScope);
rootScope = scope.get();
scopes.emplace_back(block->location, scope);
@ -70,11 +73,11 @@ void ConstraintGraphBuilder::visit(AstStatBlock* block)
prepopulateGlobalScope(scope, block);
// TODO: We should share the global scope.
rootScope->typeBindings["nil"] = TypeFun{singletonTypes.nilType};
rootScope->typeBindings["number"] = TypeFun{singletonTypes.numberType};
rootScope->typeBindings["string"] = TypeFun{singletonTypes.stringType};
rootScope->typeBindings["boolean"] = TypeFun{singletonTypes.booleanType};
rootScope->typeBindings["thread"] = TypeFun{singletonTypes.threadType};
rootScope->privateTypeBindings["nil"] = TypeFun{singletonTypes.nilType};
rootScope->privateTypeBindings["number"] = TypeFun{singletonTypes.numberType};
rootScope->privateTypeBindings["string"] = TypeFun{singletonTypes.stringType};
rootScope->privateTypeBindings["boolean"] = TypeFun{singletonTypes.booleanType};
rootScope->privateTypeBindings["thread"] = TypeFun{singletonTypes.threadType};
visitBlockWithoutChildScope(scope, block);
}
@ -99,7 +102,7 @@ void ConstraintGraphBuilder::visitBlockWithoutChildScope(const ScopePtr& scope,
{
if (auto alias = stat->as<AstStatTypeAlias>())
{
if (scope->typeBindings.count(alias->name.value) != 0)
if (scope->privateTypeBindings.count(alias->name.value) != 0)
{
auto it = aliasDefinitionLocations.find(alias->name.value);
LUAU_ASSERT(it != aliasDefinitionLocations.end());
@ -121,16 +124,16 @@ void ConstraintGraphBuilder::visitBlockWithoutChildScope(const ScopePtr& scope,
for (const auto& [name, gen] : createGenerics(defnScope, alias->generics))
{
initialFun.typeParams.push_back(gen);
defnScope->typeBindings[name] = TypeFun{gen.ty};
defnScope->privateTypeBindings[name] = TypeFun{gen.ty};
}
for (const auto& [name, genPack] : createGenericPacks(defnScope, alias->genericPacks))
{
initialFun.typePackParams.push_back(genPack);
defnScope->typePackBindings[name] = genPack.tp;
defnScope->privateTypePackBindings[name] = genPack.tp;
}
scope->typeBindings[alias->name.value] = std::move(initialFun);
scope->privateTypeBindings[alias->name.value] = std::move(initialFun);
astTypeAliasDefiningScopes[alias] = defnScope;
aliasDefinitionLocations[alias->name.value] = alias->location;
}
@ -150,6 +153,8 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStat* stat)
visit(scope, s);
else if (auto s = stat->as<AstStatFor>())
visit(scope, s);
else if (auto s = stat->as<AstStatWhile>())
visit(scope, s);
else if (auto f = stat->as<AstStatFunction>())
visit(scope, f);
else if (auto f = stat->as<AstStatLocalFunction>())
@ -242,6 +247,15 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFor* for_)
visit(forScope, for_->body);
}
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatWhile* while_)
{
check(scope, while_->condition);
ScopePtr whileScope = childScope(while_->location, scope);
visit(whileScope, while_->body);
}
void addConstraints(Constraint* constraint, NotNull<Scope> scope)
{
scope->constraints.reserve(scope->constraints.size() + scope->constraints.size());
@ -388,11 +402,11 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatTypeAlias* alia
{
// TODO: Exported type aliases
auto bindingIt = scope->typeBindings.find(alias->name.value);
auto bindingIt = scope->privateTypeBindings.find(alias->name.value);
ScopePtr* defnIt = astTypeAliasDefiningScopes.find(alias);
// These will be undefined if the alias was a duplicate definition, in which
// case we just skip over it.
if (bindingIt == scope->typeBindings.end() || defnIt == nullptr)
if (bindingIt == scope->privateTypeBindings.end() || defnIt == nullptr)
{
return;
}
@ -416,17 +430,152 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareGlobal*
LUAU_ASSERT(global->type);
TypeId globalTy = resolveType(scope, global->type);
Name globalName(global->name.value);
module->declaredGlobals[globalName] = globalTy;
scope->bindings[global->name] = Binding{globalTy, global->location};
}
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareClass* global)
static bool isMetamethod(const Name& name)
{
LUAU_ASSERT(false); // TODO: implement
return name == "__index" || name == "__newindex" || name == "__call" || name == "__concat" || name == "__unm" || name == "__add" ||
name == "__sub" || name == "__mul" || name == "__div" || name == "__mod" || name == "__pow" || name == "__tostring" ||
name == "__metatable" || name == "__eq" || name == "__lt" || name == "__le" || name == "__mode" || name == "__iter" || name == "__len";
}
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareClass* declaredClass)
{
std::optional<TypeId> superTy = std::nullopt;
if (declaredClass->superName)
{
Name superName = Name(declaredClass->superName->value);
std::optional<TypeFun> lookupType = scope->lookupType(superName);
if (!lookupType)
{
reportError(declaredClass->location, UnknownSymbol{superName, UnknownSymbol::Type});
return;
}
// We don't have generic classes, so this assertion _should_ never be hit.
LUAU_ASSERT(lookupType->typeParams.size() == 0 && lookupType->typePackParams.size() == 0);
superTy = lookupType->type;
if (!get<ClassTypeVar>(follow(*superTy)))
{
reportError(declaredClass->location,
GenericError{format("Cannot use non-class type '%s' as a superclass of class '%s'", superName.c_str(), declaredClass->name.value)});
return;
}
}
Name className(declaredClass->name.value);
TypeId classTy = arena->addType(ClassTypeVar(className, {}, superTy, std::nullopt, {}, {}, moduleName));
ClassTypeVar* ctv = getMutable<ClassTypeVar>(classTy);
TypeId metaTy = arena->addType(TableTypeVar{TableState::Sealed, scope->level});
TableTypeVar* metatable = getMutable<TableTypeVar>(metaTy);
ctv->metatable = metaTy;
scope->exportedTypeBindings[className] = TypeFun{{}, classTy};
for (const AstDeclaredClassProp& prop : declaredClass->props)
{
Name propName(prop.name.value);
TypeId propTy = resolveType(scope, prop.ty);
bool assignToMetatable = isMetamethod(propName);
// Function types always take 'self', but this isn't reflected in the
// parsed annotation. Add it here.
if (prop.isMethod)
{
if (FunctionTypeVar* ftv = getMutable<FunctionTypeVar>(propTy))
{
ftv->argNames.insert(ftv->argNames.begin(), FunctionArgument{"self", {}});
ftv->argTypes = arena->addTypePack(TypePack{{classTy}, ftv->argTypes});
ftv->hasSelf = true;
}
}
if (ctv->props.count(propName) == 0)
{
if (assignToMetatable)
metatable->props[propName] = {propTy};
else
ctv->props[propName] = {propTy};
}
else
{
TypeId currentTy = assignToMetatable ? metatable->props[propName].type : ctv->props[propName].type;
// We special-case this logic to keep the intersection flat; otherwise we
// would create a ton of nested intersection types.
if (const IntersectionTypeVar* itv = get<IntersectionTypeVar>(currentTy))
{
std::vector<TypeId> options = itv->parts;
options.push_back(propTy);
TypeId newItv = arena->addType(IntersectionTypeVar{std::move(options)});
if (assignToMetatable)
metatable->props[propName] = {newItv};
else
ctv->props[propName] = {newItv};
}
else if (get<FunctionTypeVar>(currentTy))
{
TypeId intersection = arena->addType(IntersectionTypeVar{{currentTy, propTy}});
if (assignToMetatable)
metatable->props[propName] = {intersection};
else
ctv->props[propName] = {intersection};
}
else
{
reportError(declaredClass->location, GenericError{format("Cannot overload non-function class member '%s'", propName.c_str())});
}
}
}
}
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareFunction* global)
{
LUAU_ASSERT(false); // TODO: implement
std::vector<std::pair<Name, GenericTypeDefinition>> generics = createGenerics(scope, global->generics);
std::vector<std::pair<Name, GenericTypePackDefinition>> genericPacks = createGenericPacks(scope, global->genericPacks);
std::vector<TypeId> genericTys;
genericTys.reserve(generics.size());
for (auto& [name, generic] : generics)
genericTys.push_back(generic.ty);
std::vector<TypePackId> genericTps;
genericTps.reserve(genericPacks.size());
for (auto& [name, generic] : genericPacks)
genericTps.push_back(generic.tp);
ScopePtr funScope = scope;
if (!generics.empty() || !genericPacks.empty())
funScope = childScope(global->location, scope);
TypePackId paramPack = resolveTypePack(funScope, global->params);
TypePackId retPack = resolveTypePack(funScope, global->retTypes);
TypeId fnType = arena->addType(FunctionTypeVar{funScope->level, std::move(genericTys), std::move(genericTps), paramPack, retPack});
FunctionTypeVar* ftv = getMutable<FunctionTypeVar>(fnType);
ftv->argNames.reserve(global->paramNames.size);
for (const auto& el : global->paramNames)
ftv->argNames.push_back(FunctionArgument{el.first.value, el.second});
Name fnName(global->name.value);
module->declaredGlobals[fnName] = fnType;
scope->bindings[global->name] = Binding{fnType, global->location};
}
TypePackId ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstArray<AstExpr*> exprs)
@ -590,6 +739,8 @@ TypeId ConstraintGraphBuilder::check(const ScopePtr& scope, AstExpr* expr)
result = check(scope, unary);
else if (auto binary = expr->as<AstExprBinary>())
result = check(scope, binary);
else if (auto typeAssert = expr->as<AstExprTypeAssertion>())
result = check(scope, typeAssert);
else if (auto err = expr->as<AstExprError>())
{
// Open question: Should we traverse into this?
@ -682,6 +833,12 @@ TypeId ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprBinary* binar
return nullptr;
}
TypeId ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprTypeAssertion* typeAssert)
{
check(scope, typeAssert->expr);
return resolveType(scope, typeAssert->annotation);
}
TypeId ConstraintGraphBuilder::checkExprTable(const ScopePtr& scope, AstExprTable* expr)
{
TypeId ty = arena->addType(TableTypeVar{});
@ -765,13 +922,13 @@ ConstraintGraphBuilder::FunctionSignature ConstraintGraphBuilder::checkFunctionS
for (const auto& [name, g] : genericDefinitions)
{
genericTypes.push_back(g.ty);
signatureScope->typeBindings[name] = TypeFun{g.ty};
signatureScope->privateTypeBindings[name] = TypeFun{g.ty};
}
for (const auto& [name, g] : genericPackDefinitions)
{
genericTypePacks.push_back(g.tp);
signatureScope->typePackBindings[name] = g.tp;
signatureScope->privateTypePackBindings[name] = g.tp;
}
}
else
@ -851,7 +1008,7 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
// TODO: Support imported types w/ require tracing.
LUAU_ASSERT(!ref->prefix);
std::optional<TypeFun> alias = scope->lookupTypeBinding(ref->name.value);
std::optional<TypeFun> alias = scope->lookupType(ref->name.value);
if (alias.has_value())
{
@ -949,13 +1106,13 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
for (const auto& [name, g] : genericDefinitions)
{
genericTypes.push_back(g.ty);
signatureScope->typeBindings[name] = TypeFun{g.ty};
signatureScope->privateTypeBindings[name] = TypeFun{g.ty};
}
for (const auto& [name, g] : genericPackDefinitions)
{
genericTypePacks.push_back(g.tp);
signatureScope->typePackBindings[name] = g.tp;
signatureScope->privateTypePackBindings[name] = g.tp;
}
}
else
@ -1059,7 +1216,7 @@ TypePackId ConstraintGraphBuilder::resolveTypePack(const ScopePtr& scope, AstTyp
}
else if (auto gen = tp->as<AstTypePackGeneric>())
{
if (std::optional<TypePackId> lookup = scope->lookupTypePackBinding(gen->genericName.value))
if (std::optional<TypePackId> lookup = scope->lookupPack(gen->genericName.value))
{
result = *lookup;
}
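
(For orientation, the AstStatDeclareClass and AstStatDeclareFunction visitors added above consume Luau definition-file syntax along the lines of the hand-written sketch below; the class and function names are illustrative and not taken from this diff.)

-- hypothetical definition-file excerpt
declare class Instance
    Name: string
    -- the handler above prepends an implicit 'self' argument to declared methods
    function FindFirstChild(name: string): Instance?
end

declare class Part extends Instance
    function GetMass(): number
end

declare function getService(name: string): Instance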


@ -484,6 +484,10 @@ bool ConstraintSolver::tryDispatch(const NameConstraint& c, NotNull<const Constr
return block(c.namedType, constraint);
TypeId target = follow(c.namedType);
if (target->persistent)
return true;
if (TableTypeVar* ttv = getMutable<TableTypeVar>(target))
ttv->name = c.name;
else if (MetatableTypeVar* mtv = getMutable<MetatableTypeVar>(target))
@ -637,6 +641,10 @@ bool ConstraintSolver::tryDispatch(const TypeAliasExpansionConstraint& c, NotNul
TypeId instantiated = *maybeInstantiated;
TypeId target = follow(instantiated);
if (target->persistent)
return true;
// Type function application will happily give us the exact same type if
// there are e.g. generic saturatedTypeArguments that go unused.
bool needsClone = follow(petv->fn.type) == target;


@ -818,21 +818,21 @@ const SourceModule* Frontend::getSourceModule(const ModuleName& moduleName) cons
return const_cast<Frontend*>(this)->getSourceModule(moduleName);
}
NotNull<Scope> Frontend::getGlobalScope()
ScopePtr Frontend::getGlobalScope()
{
if (!globalScope)
{
globalScope = typeChecker.globalScope;
}
return NotNull(globalScope.get());
return globalScope;
}
ModulePtr Frontend::check(const SourceModule& sourceModule, Mode mode, const ScopePtr& environmentScope)
{
ModulePtr result = std::make_shared<Module>();
ConstraintGraphBuilder cgb{sourceModule.name, &result->internalTypes, NotNull(&iceHandler), getGlobalScope()};
ConstraintGraphBuilder cgb{sourceModule.name, result, &result->internalTypes, NotNull(&iceHandler), getGlobalScope()};
cgb.visit(sourceModule.root);
result->errors = std::move(cgb.errors);


@ -82,6 +82,8 @@ bool ReplaceGenerics::ignoreChildren(TypeId ty)
// whenever we quantify, so the vectors overlap if and only if they are equal.
return (!generics.empty() || !genericPacks.empty()) && (ftv->generics == generics) && (ftv->genericPacks == genericPacks);
}
else if (FFlag::LuauClassTypeVarsInSubstitution && get<ClassTypeVar>(ty))
return true;
else
{
return false;


@ -14,6 +14,7 @@
LUAU_FASTINTVARIABLE(LuauSuggestionDistance, 4)
LUAU_FASTFLAGVARIABLE(LuauLintGlobalNeverReadBeforeWritten, false)
LUAU_FASTFLAGVARIABLE(LuauLintComparisonPrecedence, false)
namespace Luau
{
@ -49,6 +50,7 @@ static const char* kWarningNames[] = {
"MisleadingAndOr",
"CommentDirective",
"IntegerParsing",
"ComparisonPrecedence",
};
// clang-format on
@ -2629,6 +2631,65 @@ private:
}
};
class LintComparisonPrecedence : AstVisitor
{
public:
LUAU_NOINLINE static void process(LintContext& context)
{
LintComparisonPrecedence pass;
pass.context = &context;
context.root->visit(&pass);
}
private:
LintContext* context;
bool isComparison(AstExprBinary::Op op)
{
return op == AstExprBinary::CompareNe || op == AstExprBinary::CompareEq || op == AstExprBinary::CompareLt || op == AstExprBinary::CompareLe ||
op == AstExprBinary::CompareGt || op == AstExprBinary::CompareGe;
}
bool isNot(AstExpr* node)
{
AstExprUnary* expr = node->as<AstExprUnary>();
return expr && expr->op == AstExprUnary::Not;
}
bool visit(AstExprBinary* node) override
{
if (!isComparison(node->op))
return true;
// not X == Y; we silence this for not X == not Y as it's likely an intentional boolean comparison
if (isNot(node->left) && !isNot(node->right))
{
std::string op = toString(node->op);
if (node->op == AstExprBinary::CompareEq || node->op == AstExprBinary::CompareNe)
emitWarning(*context, LintWarning::Code_ComparisonPrecedence, node->location,
"not X %s Y is equivalent to (not X) %s Y; consider using X %s Y, or wrap one of the expressions in parentheses to silence",
op.c_str(), op.c_str(), node->op == AstExprBinary::CompareEq ? "~=" : "==");
else
emitWarning(*context, LintWarning::Code_ComparisonPrecedence, node->location,
"not X %s Y is equivalent to (not X) %s Y; wrap one of the expressions in parentheses to silence", op.c_str(), op.c_str());
}
else if (AstExprBinary* left = node->left->as<AstExprBinary>(); left && isComparison(left->op))
{
std::string lop = toString(left->op);
std::string rop = toString(node->op);
emitWarning(*context, LintWarning::Code_ComparisonPrecedence, node->location,
"X %s Y %s Z is equivalent to (X %s Y) %s Z; wrap one of the expressions in parentheses to silence", lop.c_str(), rop.c_str(),
lop.c_str(), rop.c_str());
}
return true;
}
};
static void fillBuiltinGlobals(LintContext& context, const AstNameTable& names, const ScopePtr& env)
{
ScopePtr current = env;
@ -2853,6 +2914,9 @@ std::vector<LintWarning> lint(AstStat* root, const AstNameTable& names, const Sc
if (context.warningEnabled(LintWarning::Code_IntegerParsing))
LintIntegerParsing::process(context);
if (context.warningEnabled(LintWarning::Code_ComparisonPrecedence) && FFlag::LuauLintComparisonPrecedence)
LintComparisonPrecedence::process(context);
std::sort(context.result.begin(), context.result.end(), WarningComparator());
return context.result;
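
(The ComparisonPrecedence lint added above flags comparisons where 'not' or a chained comparison binds differently than likely intended. A hand-written Luau sketch of code it would warn about; the names are illustrative:)

local function suspicious(x: number, y: number, z: number)
    if not x == y then end     -- parsed as (not x) == y; the lint suggests x ~= y or adding parentheses
    if not x == not y then end -- not flagged: both operands are negated, so a boolean comparison is likely intentional
    if x < y < z then end      -- parsed as (x < y) < z, comparing a boolean with a number; parenthesize one comparison to silence
end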


@ -17,6 +17,10 @@
LUAU_FASTFLAG(LuauLowerBoundsCalculation);
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution);
LUAU_FASTFLAGVARIABLE(LuauForceExportSurfacesToBeNormal, false);
LUAU_FASTFLAGVARIABLE(LuauClonePublicInterfaceLess, false);
LUAU_FASTFLAG(LuauSubstitutionReentrant);
LUAU_FASTFLAG(LuauClassTypeVarsInSubstitution);
LUAU_FASTFLAG(LuauSubstitutionFixMissingFields);
namespace Luau
{
@ -86,6 +90,118 @@ struct ForceNormal : TypeVarOnceVisitor
}
};
struct ClonePublicInterface : Substitution
{
NotNull<Module> module;
ClonePublicInterface(const TxnLog* log, Module* module)
: Substitution(log, &module->interfaceTypes)
, module(module)
{
LUAU_ASSERT(module);
}
bool isDirty(TypeId ty) override
{
if (ty->owningArena == &module->internalTypes)
return true;
if (const FunctionTypeVar* ftv = get<FunctionTypeVar>(ty))
return ftv->level.level != 0;
if (const TableTypeVar* ttv = get<TableTypeVar>(ty))
return ttv->level.level != 0;
return false;
}
bool isDirty(TypePackId tp) override
{
return tp->owningArena == &module->internalTypes;
}
TypeId clean(TypeId ty) override
{
TypeId result = clone(ty);
if (FunctionTypeVar* ftv = getMutable<FunctionTypeVar>(result))
ftv->level = TypeLevel{0, 0};
else if (TableTypeVar* ttv = getMutable<TableTypeVar>(result))
ttv->level = TypeLevel{0, 0};
return result;
}
TypePackId clean(TypePackId tp) override
{
return clone(tp);
}
TypeId cloneType(TypeId ty)
{
LUAU_ASSERT(FFlag::LuauSubstitutionReentrant && FFlag::LuauSubstitutionFixMissingFields);
std::optional<TypeId> result = substitute(ty);
if (result)
{
return *result;
}
else
{
module->errors.push_back(TypeError{module->scopes[0].first, UnificationTooComplex{}});
return getSingletonTypes().errorRecoveryType();
}
}
TypePackId cloneTypePack(TypePackId tp)
{
LUAU_ASSERT(FFlag::LuauSubstitutionReentrant && FFlag::LuauSubstitutionFixMissingFields);
std::optional<TypePackId> result = substitute(tp);
if (result)
{
return *result;
}
else
{
module->errors.push_back(TypeError{module->scopes[0].first, UnificationTooComplex{}});
return getSingletonTypes().errorRecoveryTypePack();
}
}
TypeFun cloneTypeFun(const TypeFun& tf)
{
LUAU_ASSERT(FFlag::LuauSubstitutionReentrant && FFlag::LuauSubstitutionFixMissingFields);
std::vector<GenericTypeDefinition> typeParams;
std::vector<GenericTypePackDefinition> typePackParams;
for (GenericTypeDefinition typeParam : tf.typeParams)
{
TypeId ty = cloneType(typeParam.ty);
std::optional<TypeId> defaultValue;
if (typeParam.defaultValue)
defaultValue = cloneType(*typeParam.defaultValue);
typeParams.push_back(GenericTypeDefinition{ty, defaultValue});
}
for (GenericTypePackDefinition typePackParam : tf.typePackParams)
{
TypePackId tp = cloneTypePack(typePackParam.tp);
std::optional<TypePackId> defaultValue;
if (typePackParam.defaultValue)
defaultValue = cloneTypePack(*typePackParam.defaultValue);
typePackParams.push_back(GenericTypePackDefinition{tp, defaultValue});
}
TypeId type = cloneType(tf.type);
return TypeFun{typeParams, typePackParams, type};
}
};
Module::~Module()
{
unfreeze(interfaceTypes);
@ -106,12 +222,21 @@ void Module::clonePublicInterface(InternalErrorReporter& ice)
std::unordered_map<Name, TypeFun>* exportedTypeBindings =
FFlag::DebugLuauDeferredConstraintResolution ? nullptr : &moduleScope->exportedTypeBindings;
returnType = clone(returnType, interfaceTypes, cloneState);
TxnLog log;
ClonePublicInterface clonePublicInterface{&log, this};
if (FFlag::LuauClonePublicInterfaceLess)
returnType = clonePublicInterface.cloneTypePack(returnType);
else
returnType = clone(returnType, interfaceTypes, cloneState);
moduleScope->returnType = returnType;
if (varargPack)
{
varargPack = clone(*varargPack, interfaceTypes, cloneState);
if (FFlag::LuauClonePublicInterfaceLess)
varargPack = clonePublicInterface.cloneTypePack(*varargPack);
else
varargPack = clone(*varargPack, interfaceTypes, cloneState);
moduleScope->varargPack = varargPack;
}
@ -134,7 +259,10 @@ void Module::clonePublicInterface(InternalErrorReporter& ice)
{
for (auto& [name, tf] : *exportedTypeBindings)
{
tf = clone(tf, interfaceTypes, cloneState);
if (FFlag::LuauClonePublicInterfaceLess)
tf = clonePublicInterface.cloneTypeFun(tf);
else
tf = clone(tf, interfaceTypes, cloneState);
if (FFlag::LuauLowerBoundsCalculation)
{
normalize(tf.type, interfaceTypes, ice);
@ -168,7 +296,10 @@ void Module::clonePublicInterface(InternalErrorReporter& ice)
for (auto& [name, ty] : declaredGlobals)
{
ty = clone(ty, interfaceTypes, cloneState);
if (FFlag::LuauClonePublicInterfaceLess)
ty = clonePublicInterface.cloneType(ty);
else
ty = clone(ty, interfaceTypes, cloneState);
if (FFlag::LuauLowerBoundsCalculation)
{
normalize(ty, interfaceTypes, ice);


@ -162,7 +162,8 @@ struct Normalize final : TypeVarVisitor
// It should never be the case that this TypeVar is normal, but is bound to a non-normal type, except in nontrivial cases.
LUAU_ASSERT(!ty->normal || ty->normal == btv.boundTo->normal);
asMutable(ty)->normal = btv.boundTo->normal;
if (!ty->normal)
asMutable(ty)->normal = btv.boundTo->normal;
return !ty->normal;
}


@ -5,11 +5,13 @@
#include "Luau/Scope.h"
#include "Luau/Substitution.h"
#include "Luau/TxnLog.h"
#include "Luau/TypeVar.h"
#include "Luau/VisitTypeVar.h"
LUAU_FASTFLAG(DebugLuauSharedSelf)
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution);
LUAU_FASTFLAGVARIABLE(LuauQuantifyConstrained, false)
LUAU_FASTFLAG(LuauClassTypeVarsInSubstitution)
namespace Luau
{
@ -297,6 +299,9 @@ struct PureQuantifier : Substitution
bool ignoreChildren(TypeId ty) override
{
if (FFlag::LuauClassTypeVarsInSubstitution && get<ClassTypeVar>(ty))
return true;
return ty->persistent;
}
bool ignoreChildren(TypePackId ty) override


@ -122,34 +122,4 @@ std::optional<TypeId> Scope::lookup(Symbol sym)
}
}
std::optional<TypeFun> Scope::lookupTypeBinding(const Name& name)
{
Scope* s = this;
while (s)
{
auto it = s->typeBindings.find(name);
if (it != s->typeBindings.end())
return it->second;
s = s->parent.get();
}
return std::nullopt;
}
std::optional<TypePackId> Scope::lookupTypePackBinding(const Name& name)
{
Scope* s = this;
while (s)
{
auto it = s->typePackBindings.find(name);
if (it != s->typePackBindings.end())
return it->second;
s = s->parent.get();
}
return std::nullopt;
}
} // namespace Luau


@ -8,9 +8,9 @@
#include <algorithm>
#include <stdexcept>
LUAU_FASTFLAGVARIABLE(LuauAnyificationMustClone, false)
LUAU_FASTFLAGVARIABLE(LuauSubstitutionFixMissingFields, false)
LUAU_FASTFLAG(LuauLowerBoundsCalculation)
LUAU_FASTFLAG(LuauClonePublicInterfaceLess)
LUAU_FASTINTVARIABLE(LuauTarjanChildLimit, 10000)
LUAU_FASTFLAGVARIABLE(LuauClassTypeVarsInSubstitution, false)
LUAU_FASTFLAG(LuauUnknownAndNeverType)
@ -472,7 +472,7 @@ std::optional<TypePackId> Substitution::substitute(TypePackId tp)
TypeId Substitution::clone(TypeId ty)
{
return shallowClone(ty, *arena, log);
return shallowClone(ty, *arena, log, /* alwaysClone */ FFlag::LuauClonePublicInterfaceLess);
}
TypePackId Substitution::clone(TypePackId tp)
@ -497,6 +497,10 @@ TypePackId Substitution::clone(TypePackId tp)
clone.hidden = vtp->hidden;
return addTypePack(std::move(clone));
}
else if (FFlag::LuauClonePublicInterfaceLess)
{
return addTypePack(*tp);
}
else
return tp;
}
@ -557,7 +561,7 @@ void Substitution::replaceChildren(TypeId ty)
if (ignoreChildren(ty))
return;
if (FFlag::LuauAnyificationMustClone && ty->owningArena != arena)
if (ty->owningArena != arena)
return;
if (FunctionTypeVar* ftv = getMutable<FunctionTypeVar>(ty))
@ -638,7 +642,7 @@ void Substitution::replaceChildren(TypePackId tp)
if (ignoreChildren(tp))
return;
if (FFlag::LuauAnyificationMustClone && tp->owningArena != arena)
if (tp->owningArena != arena)
return;
if (TypePack* tpp = getMutable<TypePack>(tp))


@ -10,6 +10,7 @@
#include "Luau/Normalize.h"
#include "Luau/TxnLog.h"
#include "Luau/TypeUtils.h"
#include "Luau/TypeVar.h"
#include "Luau/Unifier.h"
#include "Luau/ToString.h"
@ -282,18 +283,14 @@ struct TypeChecker2 : public AstVisitor
// leftType must have a property called indexName->index
std::optional<TypeId> t = findTablePropertyRespectingMeta(module->errors, leftType, indexName->index.value, indexName->location);
if (t)
std::optional<TypeId> ty = getIndexTypeFromType(module->getModuleScope(), leftType, indexName->index.value, indexName->location, /* addErrors */ true);
if (ty)
{
if (!isSubtype(resultType, *t, ice))
if (!isSubtype(resultType, *ty, ice))
{
reportError(TypeMismatch{resultType, *t}, indexName->location);
reportError(TypeMismatch{resultType, *ty}, indexName->location);
}
}
else
{
reportError(UnknownProperty{leftType, indexName->index.value}, indexName->location);
}
return true;
}
@ -324,6 +321,22 @@ struct TypeChecker2 : public AstVisitor
return true;
}
bool visit(AstExprTypeAssertion* expr) override
{
TypeId annotationType = lookupAnnotation(expr->annotation);
TypeId computedType = lookupType(expr->expr);
// Note: As an optimization, we try 'number <: number | string' first, as that is the more likely case.
if (isSubtype(annotationType, computedType, ice))
return true;
if (isSubtype(computedType, annotationType, ice))
return true;
reportError(TypesAreUnrelated{computedType, annotationType}, expr->location);
return true;
}
/** Extract a TypeId for the first type of the provided pack.
*
* Note that this may require modifying some types. I hope this doesn't cause problems!
@ -374,7 +387,7 @@ struct TypeChecker2 : public AstVisitor
// TODO: Imported types
std::optional<TypeFun> alias = scope->lookupTypeBinding(ty->name.value);
std::optional<TypeFun> alias = scope->lookupType(ty->name.value);
if (alias.has_value())
{
@ -473,7 +486,7 @@ struct TypeChecker2 : public AstVisitor
}
else
{
if (scope->lookupTypePackBinding(ty->name.value))
if (scope->lookupPack(ty->name.value))
{
reportError(
SwappedGenericTypeParameter{
@ -501,10 +514,10 @@ struct TypeChecker2 : public AstVisitor
Scope* scope = findInnermostScope(tp->location);
LUAU_ASSERT(scope);
std::optional<TypePackId> alias = scope->lookupTypePackBinding(tp->genericName.value);
std::optional<TypePackId> alias = scope->lookupPack(tp->genericName.value);
if (!alias.has_value())
{
if (scope->lookupTypeBinding(tp->genericName.value))
if (scope->lookupType(tp->genericName.value))
{
reportError(
SwappedGenericTypeParameter{
@ -531,6 +544,142 @@ struct TypeChecker2 : public AstVisitor
{
module->errors.emplace_back(std::move(e));
}
std::optional<TypeId> getIndexTypeFromType(
const ScopePtr& scope, TypeId type, const Name& name, const Location& location, bool addErrors)
{
type = follow(type);
if (get<ErrorTypeVar>(type) || get<AnyTypeVar>(type) || get<NeverTypeVar>(type))
return type;
if (auto f = get<FreeTypeVar>(type))
*asMutable(type) = TableTypeVar{TableState::Free, f->level};
if (isString(type))
{
std::optional<TypeId> mtIndex = Luau::findMetatableEntry(module->errors, singletonTypes.stringType, "__index", location);
LUAU_ASSERT(mtIndex);
type = *mtIndex;
}
if (TableTypeVar* tableType = getMutableTableType(type))
{
return findTablePropertyRespectingMeta(module->errors, type, name, location);
}
else if (const ClassTypeVar* cls = get<ClassTypeVar>(type))
{
const Property* prop = lookupClassProp(cls, name);
if (prop)
return prop->type;
}
else if (const UnionTypeVar* utv = get<UnionTypeVar>(type))
{
std::vector<TypeId> goodOptions;
std::vector<TypeId> badOptions;
for (TypeId t : utv)
{
// TODO: we should probably limit recursion here?
// RecursionLimiter _rl(&recursionCount, FInt::LuauTypeInferRecursionLimit);
// Not needed when we normalize types.
if (get<AnyTypeVar>(follow(t)))
return t;
if (std::optional<TypeId> ty = getIndexTypeFromType(scope, t, name, location, /* addErrors= */ false))
goodOptions.push_back(*ty);
else
badOptions.push_back(t);
}
if (!badOptions.empty())
{
if (addErrors)
{
if (goodOptions.empty())
reportError(UnknownProperty{type, name}, location);
else
reportError(MissingUnionProperty{type, badOptions, name}, location);
}
return std::nullopt;
}
std::vector<TypeId> result = reduceUnion(goodOptions);
if (result.empty())
return singletonTypes.neverType;
if (result.size() == 1)
return result[0];
return module->internalTypes.addType(UnionTypeVar{std::move(result)});
}
else if (const IntersectionTypeVar* itv = get<IntersectionTypeVar>(type))
{
std::vector<TypeId> parts;
for (TypeId t : itv->parts)
{
// TODO: we should probably limit recursion here?
// RecursionLimiter _rl(&recursionCount, FInt::LuauTypeInferRecursionLimit);
if (std::optional<TypeId> ty = getIndexTypeFromType(scope, t, name, location, /* addErrors= */ false))
parts.push_back(*ty);
}
// If no parts of the intersection had the property we looked up for, it never existed at all.
if (parts.empty())
{
if (addErrors)
reportError(UnknownProperty{type, name}, location);
return std::nullopt;
}
if (parts.size() == 1)
return parts[0];
return module->internalTypes.addType(IntersectionTypeVar{std::move(parts)}); // Not at all correct.
}
if (addErrors)
reportError(UnknownProperty{type, name}, location);
return std::nullopt;
}
std::vector<TypeId> reduceUnion(const std::vector<TypeId>& types)
{
std::vector<TypeId> result;
for (TypeId t : types)
{
t = follow(t);
if (get<NeverTypeVar>(t))
continue;
if (get<ErrorTypeVar>(t) || get<AnyTypeVar>(t))
return {t};
if (const UnionTypeVar* utv = get<UnionTypeVar>(t))
{
for (TypeId ty : utv)
{
ty = follow(ty);
if (get<NeverTypeVar>(ty))
continue;
if (get<ErrorTypeVar>(ty) || get<AnyTypeVar>(ty))
return {ty};
if (result.end() == std::find(result.begin(), result.end(), ty))
result.push_back(ty);
}
}
else if (std::find(result.begin(), result.end(), t) == result.end())
result.push_back(t);
}
return result;
}
};
void check(const SourceModule& sourceModule, Module* module)
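
(The visit(AstExprTypeAssertion*) handler added above accepts a '::' assertion when either the annotation or the computed type is a subtype of the other, and otherwise reports TypesAreUnrelated. A hand-written Luau sketch of both outcomes, assuming this new checking path is enabled; names are illustrative:)

local n: number = 1
local a: any = n

local ok1 = a :: number  -- accepted: number is a subtype of any
local ok2 = n :: number? -- accepted: number is a subtype of number?
local bad = n :: string  -- reported as unrelated: neither type is a subtype of the other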


@ -33,7 +33,6 @@ LUAU_FASTINTVARIABLE(LuauVisitRecursionLimit, 500)
LUAU_FASTFLAG(LuauKnowsTheDataModel3)
LUAU_FASTFLAG(LuauAutocompleteDynamicLimits)
LUAU_FASTFLAGVARIABLE(LuauExpectedTableUnionIndexerType, false)
LUAU_FASTFLAGVARIABLE(LuauIndexSilenceErrors, false)
LUAU_FASTFLAGVARIABLE(LuauLowerBoundsCalculation, false)
LUAU_FASTFLAGVARIABLE(DebugLuauFreezeDuringUnification, false)
LUAU_FASTFLAGVARIABLE(LuauSelfCallAutocompleteFix3, false)
@ -830,6 +829,14 @@ struct Demoter : Substitution
return get<FreeTypePack>(tp);
}
bool ignoreChildren(TypeId ty) override
{
if (FFlag::LuauClassTypeVarsInSubstitution && get<ClassTypeVar>(ty))
return true;
return false;
}
TypeId clean(TypeId ty) override
{
auto ftv = get<FreeTypeVar>(ty);
@ -1925,7 +1932,7 @@ std::optional<TypeId> TypeChecker::findTablePropertyRespectingMeta(TypeId lhsTyp
{
ErrorVec errors;
auto result = Luau::findTablePropertyRespectingMeta(errors, lhsType, name, location);
if (!FFlag::LuauIndexSilenceErrors || addErrors)
if (addErrors)
reportErrors(errors);
return result;
}
@ -1934,7 +1941,7 @@ std::optional<TypeId> TypeChecker::findMetatableEntry(TypeId type, std::string e
{
ErrorVec errors;
auto result = Luau::findMetatableEntry(errors, type, entry, location);
if (!FFlag::LuauIndexSilenceErrors || addErrors)
if (addErrors)
reportErrors(errors);
return result;
}
@ -1946,7 +1953,7 @@ std::optional<TypeId> TypeChecker::getIndexTypeFromType(
std::optional<TypeId> result = getIndexTypeFromTypeImpl(scope, type, name, location, addErrors);
if (FFlag::LuauIndexSilenceErrors && !addErrors)
if (!addErrors)
LUAU_ASSERT(errorCount == currentModule->errors.size());
return result;


@ -22,6 +22,7 @@ LUAU_FASTFLAG(LuauErrorRecoveryType);
LUAU_FASTFLAG(LuauUnknownAndNeverType)
LUAU_FASTFLAG(LuauQuantifyConstrained)
LUAU_FASTFLAGVARIABLE(LuauScalarShapeSubtyping, false)
LUAU_FASTFLAG(LuauClassTypeVarsInSubstitution)
namespace Luau
{
@ -273,6 +274,9 @@ TypePackId Widen::clean(TypePackId)
bool Widen::ignoreChildren(TypeId ty)
{
if (FFlag::LuauClassTypeVarsInSubstitution && get<ClassTypeVar>(ty))
return true;
return !log->is<UnionTypeVar>(ty);
}
@ -370,25 +374,14 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
if (superFree && subFree && superFree->level.subsumes(subFree->level))
{
occursCheck(subTy, superTy);
// The occurrence check might have caused superTy no longer to be a free type
bool occursFailed = bool(log.getMutable<ErrorTypeVar>(subTy));
if (!occursFailed)
{
if (!occursCheck(subTy, superTy))
log.replace(subTy, BoundTypeVar(superTy));
}
return;
}
else if (superFree && subFree)
{
occursCheck(superTy, subTy);
bool occursFailed = bool(log.getMutable<ErrorTypeVar>(superTy));
if (!occursFailed)
if (!occursCheck(superTy, subTy))
{
if (superFree->level.subsumes(subFree->level))
{
@ -402,24 +395,18 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
}
else if (superFree)
{
TypeLevel superLevel = superFree->level;
occursCheck(superTy, subTy);
bool occursFailed = bool(log.getMutable<ErrorTypeVar>(superTy));
// Unification can't change the level of a generic.
auto subGeneric = log.getMutable<GenericTypeVar>(subTy);
if (subGeneric && !subGeneric->level.subsumes(superLevel))
if (subGeneric && !subGeneric->level.subsumes(superFree->level))
{
// TODO: a more informative error message? CLI-39912
reportError(TypeError{location, GenericError{"Generic subtype escaping scope"}});
return;
}
// The occurrence check might have caused superTy no longer to be a free type
if (!occursFailed)
if (!occursCheck(superTy, subTy))
{
promoteTypeLevels(log, types, superLevel, subTy);
promoteTypeLevels(log, types, superFree->level, subTy);
Widen widen{types};
log.replace(superTy, BoundTypeVar(widen(subTy)));
@ -437,11 +424,6 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
return;
}
TypeLevel subLevel = subFree->level;
occursCheck(subTy, superTy);
bool occursFailed = bool(log.getMutable<ErrorTypeVar>(subTy));
// Unification can't change the level of a generic.
auto superGeneric = log.getMutable<GenericTypeVar>(superTy);
if (superGeneric && !superGeneric->level.subsumes(subFree->level))
@ -451,9 +433,9 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
return;
}
if (!occursFailed)
if (!occursCheck(subTy, superTy))
{
promoteTypeLevels(log, types, subLevel, superTy);
promoteTypeLevels(log, types, subFree->level, superTy);
log.replace(subTy, BoundTypeVar(superTy));
}
@ -1033,9 +1015,7 @@ void Unifier::tryUnify_(TypePackId subTp, TypePackId superTp, bool isFunctionCal
if (log.getMutable<Unifiable::Free>(superTp))
{
occursCheck(superTp, subTp);
if (!log.getMutable<ErrorTypeVar>(superTp))
if (!occursCheck(superTp, subTp))
{
Widen widen{types};
log.replace(superTp, Unifiable::Bound<TypePackId>(widen(subTp)));
@ -1043,9 +1023,7 @@ void Unifier::tryUnify_(TypePackId subTp, TypePackId superTp, bool isFunctionCal
}
else if (log.getMutable<Unifiable::Free>(subTp))
{
occursCheck(subTp, superTp);
if (!log.getMutable<ErrorTypeVar>(subTp))
if (!occursCheck(subTp, superTp))
{
log.replace(subTp, Unifiable::Bound<TypePackId>(superTp));
}
@ -2106,8 +2084,8 @@ void Unifier::unifyLowerBound(TypePackId subTy, TypePackId superTy, TypeLevel de
{
TypePackId tailPack = follow(*t);
if (log.get<FreeTypePack>(tailPack))
occursCheck(tailPack, subTy);
if (log.get<FreeTypePack>(tailPack) && occursCheck(tailPack, subTy))
return;
FreeTypePack* freeTailPack = log.getMutable<FreeTypePack>(tailPack);
if (!freeTailPack)
@ -2180,32 +2158,35 @@ void Unifier::unifyLowerBound(TypePackId subTy, TypePackId superTy, TypeLevel de
}
}
void Unifier::occursCheck(TypeId needle, TypeId haystack)
bool Unifier::occursCheck(TypeId needle, TypeId haystack)
{
sharedState.tempSeenTy.clear();
return occursCheck(sharedState.tempSeenTy, needle, haystack);
}
void Unifier::occursCheck(DenseHashSet<TypeId>& seen, TypeId needle, TypeId haystack)
bool Unifier::occursCheck(DenseHashSet<TypeId>& seen, TypeId needle, TypeId haystack)
{
RecursionLimiter _ra(&sharedState.counters.recursionCount,
FFlag::LuauAutocompleteDynamicLimits ? sharedState.counters.recursionLimit : FInt::LuauTypeInferRecursionLimit);
bool occurrence = false;
auto check = [&](TypeId tv) {
occursCheck(seen, needle, tv);
if (occursCheck(seen, needle, tv))
occurrence = true;
};
needle = log.follow(needle);
haystack = log.follow(haystack);
if (seen.find(haystack))
return;
return false;
seen.insert(haystack);
if (log.getMutable<Unifiable::Error>(needle))
return;
return false;
if (!log.getMutable<Unifiable::Free>(needle))
ice("Expected needle to be free");
@ -2215,11 +2196,11 @@ void Unifier::occursCheck(DenseHashSet<TypeId>& seen, TypeId needle, TypeId hays
reportError(TypeError{location, OccursCheckFailed{}});
log.replace(needle, *getSingletonTypes().errorRecoveryType());
return;
return true;
}
if (log.getMutable<FreeTypeVar>(haystack))
return;
return false;
else if (auto a = log.getMutable<UnionTypeVar>(haystack))
{
for (TypeId ty : a->options)
@ -2235,27 +2216,29 @@ void Unifier::occursCheck(DenseHashSet<TypeId>& seen, TypeId needle, TypeId hays
for (TypeId ty : a->parts)
check(ty);
}
return occurrence;
}
void Unifier::occursCheck(TypePackId needle, TypePackId haystack)
bool Unifier::occursCheck(TypePackId needle, TypePackId haystack)
{
sharedState.tempSeenTp.clear();
return occursCheck(sharedState.tempSeenTp, needle, haystack);
}
void Unifier::occursCheck(DenseHashSet<TypePackId>& seen, TypePackId needle, TypePackId haystack)
bool Unifier::occursCheck(DenseHashSet<TypePackId>& seen, TypePackId needle, TypePackId haystack)
{
needle = log.follow(needle);
haystack = log.follow(haystack);
if (seen.find(haystack))
return;
return false;
seen.insert(haystack);
if (log.getMutable<Unifiable::Error>(needle))
return;
return false;
if (!log.getMutable<Unifiable::Free>(needle))
ice("Expected needle pack to be free");
@ -2270,7 +2253,7 @@ void Unifier::occursCheck(DenseHashSet<TypePackId>& seen, TypePackId needle, Typ
reportError(TypeError{location, OccursCheckFailed{}});
log.replace(needle, *getSingletonTypes().errorRecoveryTypePack());
return;
return true;
}
if (auto a = get<TypePack>(haystack); a && a->tail)
@ -2281,6 +2264,8 @@ void Unifier::occursCheck(DenseHashSet<TypePackId>& seen, TypePackId needle, Typ
break;
}
return false;
}
Unifier Unifier::makeChildUnifier()

CLI/Reduce.cpp (new file, 511 lines)

@ -0,0 +1,511 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/Ast.h"
#include "Luau/Common.h"
#include "Luau/Parser.h"
#include "Luau/Transpiler.h"
#include "FileUtils.h"
#include <algorithm>
#include <stdio.h>
#include <string>
#include <string_view>
#include <queue>
#define VERBOSE 0 // 1 - print out commandline invocations. 2 - print out stdout
#ifdef _WIN32
const auto popen = &_popen;
const auto pclose = &_pclose;
#endif
using namespace Luau;
enum class TestResult
{
BugFound, // We encountered the bug we are trying to isolate
NoBug, // We did not encounter the bug we are trying to isolate
};
struct Enqueuer : public AstVisitor
{
std::queue<AstStatBlock*>* queue;
explicit Enqueuer(std::queue<AstStatBlock*>* queue)
: queue(queue)
{
LUAU_ASSERT(queue);
}
bool visit(AstStatBlock* block) override
{
queue->push(block);
return false;
}
};
struct Reducer
{
Allocator allocator;
AstNameTable nameTable{allocator};
ParseOptions parseOptions;
ParseResult parseResult;
AstStatBlock* root;
std::string tempScriptName;
std::string appName;
std::vector<std::string> appArgs;
std::string_view searchText;
Reducer()
{
parseOptions.captureComments = true;
}
std::string readLine(FILE* f)
{
std::string line = "";
char buffer[256];
while (fgets(buffer, sizeof(buffer), f))
{
auto len = strlen(buffer);
line += std::string(buffer, len);
if (buffer[len - 1] == '\n')
break;
}
return line;
}
void writeTempScript(bool minify = false)
{
std::string source = transpileWithTypes(*root);
if (minify)
{
size_t pos = 0;
do
{
pos = source.find("\n\n", pos);
if (pos == std::string::npos)
break;
source.erase(pos, 1);
} while (true);
}
FILE* f = fopen(tempScriptName.c_str(), "w");
if (!f)
{
printf("Unable to open temp script to %s\n", tempScriptName.c_str());
exit(2);
}
for (const HotComment& comment : parseResult.hotcomments)
fprintf(f, "--!%s\n", comment.content.c_str());
auto written = fwrite(source.data(), 1, source.size(), f);
if (written != source.size())
{
printf("??? %zu %zu\n", written, source.size());
printf("Unable to write to temp script %s\n", tempScriptName.c_str());
exit(3);
}
fclose(f);
}
int step = 0;
std::string escape(const std::string& s)
{
std::string result;
result.reserve(s.size() + 20); // guess
result += '"';
for (char c : s)
{
if (c == '"')
result += '\\';
result += c;
}
result += '"';
return result;
}
TestResult run()
{
writeTempScript();
std::string command = appName + " " + escape(tempScriptName);
for (const auto& arg : appArgs)
command += " " + escape(arg);
#if VERBOSE >= 1
printf("running %s\n", command.c_str());
#endif
TestResult result = TestResult::NoBug;
++step;
printf("Step %4d...\n", step);
FILE* p = popen(command.c_str(), "r");
while (!feof(p))
{
std::string s = readLine(p);
#if VERBOSE >= 2
printf("%s", s.c_str());
#endif
if (std::string::npos != s.find(searchText))
{
result = TestResult::BugFound;
break;
}
}
pclose(p);
return result;
}
std::vector<AstStat*> getNestedStats(AstStat* stat)
{
std::vector<AstStat*> result;
auto append = [&](AstStatBlock* block) {
if (block)
result.insert(result.end(), block->body.data, block->body.data + block->body.size);
};
if (auto block = stat->as<AstStatBlock>())
append(block);
else if (auto ifs = stat->as<AstStatIf>())
{
append(ifs->thenbody);
if (ifs->elsebody)
{
if (AstStatBlock* elseBlock = ifs->elsebody->as<AstStatBlock>())
append(elseBlock);
else if (AstStatIf* elseIf = ifs->elsebody->as<AstStatIf>())
{
auto innerStats = getNestedStats(elseIf);
result.insert(end(result), begin(innerStats), end(innerStats));
}
else
{
printf("AstStatIf's else clause can have more statement types than I thought\n");
LUAU_ASSERT(0);
}
}
}
else if (auto w = stat->as<AstStatWhile>())
append(w->body);
else if (auto r = stat->as<AstStatRepeat>())
append(r->body);
else if (auto f = stat->as<AstStatFor>())
append(f->body);
else if (auto f = stat->as<AstStatForIn>())
append(f->body);
else if (auto f = stat->as<AstStatFunction>())
append(f->func->body);
else if (auto f = stat->as<AstStatLocalFunction>())
append(f->func->body);
return result;
}
// Move new body data into allocator-managed storage so that it's safe to keep around longterm.
AstStat** reallocateStatements(const std::vector<AstStat*>& statements)
{
AstStat** newData = static_cast<AstStat**>(allocator.allocate(sizeof(AstStat*) * statements.size()));
std::copy(statements.data(), statements.data() + statements.size(), newData);
return newData;
}
// Semiopen interval
using Span = std::pair<size_t, size_t>;
// Generates 'chunks' semiopen spans of equal-ish size to span the indices running from 0 to 'size'
// Also inverses.
std::vector<std::pair<Span, Span>> generateSpans(size_t size, size_t chunks)
{
if (size <= 1)
return {};
LUAU_ASSERT(chunks > 0);
size_t chunkLength = std::max<size_t>(1, size / chunks);
std::vector<std::pair<Span, Span>> result;
auto append = [&result](Span a, Span b) {
if (a.first == a.second && b.first == b.second)
return;
else
result.emplace_back(a, b);
};
size_t i = 0;
while (i < size)
{
size_t end = std::min(i + chunkLength, size);
append(Span{0, i}, Span{end, size});
i = end;
}
i = 0;
while (i < size)
{
size_t end = std::min(i + chunkLength, size);
append(Span{i, end}, Span{size, size});
i = end;
}
return result;
}
// Returns the statements of block within span1 and span2
// Also has the hokey restriction that span1 must come before span2
std::vector<AstStat*> prunedSpan(AstStatBlock* block, Span span1, Span span2)
{
std::vector<AstStat*> result;
for (size_t i = span1.first; i < span1.second; ++i)
result.push_back(block->body.data[i]);
for (size_t i = span2.first; i < span2.second; ++i)
result.push_back(block->body.data[i]);
return result;
}
// returns true if anything was culled plus the chunk count
std::pair<bool, size_t> deleteChildStatements(AstStatBlock* block, size_t chunkCount)
{
if (block->body.size == 0)
return {false, chunkCount};
do
{
auto permutations = generateSpans(block->body.size, chunkCount);
for (const auto& [span1, span2] : permutations)
{
auto tempStatements = prunedSpan(block, span1, span2);
AstArray<AstStat*> backupBody{tempStatements.data(), tempStatements.size()};
std::swap(block->body, backupBody);
TestResult result = run();
if (result == TestResult::BugFound)
{
// The bug still reproduces without the statements we've culled. Commit.
block->body.data = reallocateStatements(tempStatements);
return {true, std::max<size_t>(2, chunkCount - 1)};
}
else
{
// The statements we've culled are critical for the reproduction of the bug.
// TODO try promoting its contents into this scope
std::swap(block->body, backupBody);
}
}
chunkCount *= 2;
} while (chunkCount <= block->body.size);
return {false, block->body.size};
}
bool deleteChildStatements(AstStatBlock* b)
{
bool result = false;
size_t chunkCount = 2;
while (true)
{
auto [workDone, newChunkCount] = deleteChildStatements(b, chunkCount);
if (workDone)
{
result = true;
chunkCount = newChunkCount;
continue;
}
else
break;
}
return result;
}
bool tryPromotingChildStatements(AstStatBlock* b, size_t index)
{
std::vector<AstStat*> tempStats(b->body.data, b->body.data + b->body.size);
AstStat* removed = tempStats.at(index);
tempStats.erase(begin(tempStats) + index);
std::vector<AstStat*> nestedStats = getNestedStats(removed);
tempStats.insert(begin(tempStats) + index, begin(nestedStats), end(nestedStats));
AstArray<AstStat*> tempArray{tempStats.data(), tempStats.size()};
std::swap(b->body, tempArray);
TestResult result = run();
if (result == TestResult::BugFound)
{
b->body.data = reallocateStatements(tempStats);
return true;
}
else
{
std::swap(b->body, tempArray);
return false;
}
}
// We live with some weirdness because I'm kind of lazy: If a statement's
// contents are promoted, we try promoting those promoted statements right
// away. I don't think it matters: If we can delete a statement and still
// exhibit the bug, we should do so. The order isn't so important.
bool tryPromotingChildStatements(AstStatBlock* b)
{
size_t i = 0;
while (i < b->body.size)
{
bool promoted = tryPromotingChildStatements(b, i);
if (!promoted)
++i;
}
return false;
}
void walk(AstStatBlock* block)
{
std::queue<AstStatBlock*> queue;
Enqueuer enqueuer{&queue};
queue.push(block);
while (!queue.empty())
{
AstStatBlock* b = queue.front();
queue.pop();
bool result = false;
do
{
result = deleteChildStatements(b);
/* Try other reductions here before we walk into child statements
* Other reductions to try someday:
*
* Promoting a statement's children to the enclosing block.
* Deleting type annotations
* Deleting parts of type annotations
* Replacing subexpressions with ({} :: any)
* Inlining type aliases
* Inlining constants
* Inlining functions
*/
result |= tryPromotingChildStatements(b);
} while (result);
for (AstStat* stat : b->body)
stat->visit(&enqueuer);
}
}
void run(const std::string scriptName, const std::string appName, const std::vector<std::string>& appArgs, std::string_view source,
std::string_view searchText)
{
tempScriptName = scriptName;
if (tempScriptName.substr(tempScriptName.size() - 4) == ".lua")
{
tempScriptName.erase(tempScriptName.size() - 4);
tempScriptName += "-reduced.lua";
}
else
{
this->tempScriptName = scriptName + "-reduced";
}
#if 0
// Handy debugging trick: VS Code will update its view of the file in realtime as it is edited.
std::string wheee = "code " + tempScriptName;
system(wheee.c_str());
#endif
printf("Temp script: %s\n", tempScriptName.c_str());
this->appName = appName;
this->appArgs = appArgs;
this->searchText = searchText;
parseResult = Parser::parse(source.data(), source.size(), nameTable, allocator, parseOptions);
if (!parseResult.errors.empty())
{
printf("Parse errors\n");
exit(1);
}
root = parseResult.root;
const TestResult initialResult = run();
if (initialResult == TestResult::NoBug)
{
printf("Could not find failure string in the unmodified script! Check your commandline arguments\n");
exit(2);
}
walk(root);
writeTempScript(/* minify */ true);
printf("Done! Check %s\n", tempScriptName.c_str());
}
};
[[noreturn]] void help(const std::vector<std::string_view>& args)
{
printf("Syntax: %s script application \"search text\" [arguments]\n", args[0].data());
exit(1);
}
int main(int argc, char** argv)
{
const std::vector<std::string_view> args(argv, argv + argc);
if (args.size() < 4)
help(args);
for (int i = 1; i < args.size(); ++i)
{
if (args[i] == "--help")
help(args);
}
const std::string scriptName = argv[1];
const std::string appName = argv[2];
const std::string searchText = argv[3];
const std::vector<std::string> appArgs(begin(args) + 4, end(args));
std::optional<std::string> source = readFile(scriptName);
if (!source)
{
printf("Could not read source %s\n", argv[1]);
exit(1);
}
Reducer reducer;
reducer.run(scriptName, appName, appArgs, *source, searchText);
}
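
The reducer's core loop (generateSpans / prunedSpan / deleteChildStatements) is a chunked delta-debugging pass: drop a contiguous chunk of statements, re-run the target, and keep the deletion only if the failure text still appears. Below is a self-contained sketch of that idea over a toy statement list; bugStillReproduces is a hypothetical stand-in for running the application and searching its output, not part of the tool.

#include <algorithm>
#include <cstdio>
#include <vector>

// Stand-in for "run the target application and look for the failure text".
static bool bugStillReproduces(const std::vector<int>& stats)
{
    // pretend the repro needs statements 2 and 7 to survive
    bool has2 = false, has7 = false;
    for (int s : stats)
    {
        has2 |= (s == 2);
        has7 |= (s == 7);
    }
    return has2 && has7;
}

int main()
{
    std::vector<int> stats = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};

    // One sweep of the chunked-deletion idea: split the list into chunkCount pieces,
    // try dropping each piece, and keep the drop only if the bug still reproduces.
    for (size_t chunkCount = 2; chunkCount <= stats.size(); chunkCount *= 2)
    {
        size_t chunkLength = (stats.size() + chunkCount - 1) / chunkCount;
        size_t i = 0;
        while (i < stats.size())
        {
            size_t end = std::min(i + chunkLength, stats.size());

            std::vector<int> pruned(stats.begin(), stats.begin() + i);
            pruned.insert(pruned.end(), stats.begin() + end, stats.end());

            if (bugStillReproduces(pruned))
                stats = pruned; // commit the deletion; the next chunk starts at the same index
            else
                i = end; // this chunk is needed for the repro; keep it and move on
        }
    }

    for (int s : stats)
        printf("%d ", s); // 2 and 7 are guaranteed to survive; the real reducer loops until no further progress
    printf("\n");
    return 0;
}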

View File

@ -32,11 +32,13 @@ if(LUAU_BUILD_CLI)
add_executable(Luau.Repl.CLI)
add_executable(Luau.Analyze.CLI)
add_executable(Luau.Ast.CLI)
add_executable(Luau.Reduce.CLI)
# This also adds target `name` on Linux/macOS and `name.exe` on Windows
set_target_properties(Luau.Repl.CLI PROPERTIES OUTPUT_NAME luau)
set_target_properties(Luau.Analyze.CLI PROPERTIES OUTPUT_NAME luau-analyze)
set_target_properties(Luau.Ast.CLI PROPERTIES OUTPUT_NAME luau-ast)
set_target_properties(Luau.Reduce.CLI PROPERTIES OUTPUT_NAME luau-reduce)
endif()
if(LUAU_BUILD_TESTS)
@ -49,6 +51,7 @@ if(LUAU_BUILD_WEB)
add_executable(Luau.Web)
endif()
include(Sources.cmake)
target_include_directories(Luau.Common INTERFACE Common/include)
@ -171,6 +174,10 @@ if(LUAU_BUILD_CLI)
target_link_libraries(Luau.Analyze.CLI PRIVATE Luau.Analysis)
target_link_libraries(Luau.Ast.CLI PRIVATE Luau.Ast Luau.Analysis)
target_compile_features(Luau.Reduce.CLI PRIVATE cxx_std_17)
target_include_directories(Luau.Reduce.CLI PUBLIC Reduce/include)
target_link_libraries(Luau.Reduce.CLI PRIVATE Luau.Common Luau.Ast Luau.Analysis)
endif()
if(LUAU_BUILD_TESTS)

View File

@ -38,14 +38,21 @@ public:
// Two operand mov instruction has additional specialized encodings
void mov(OperandX64 lhs, OperandX64 rhs);
void mov64(RegisterX64 lhs, int64_t imm);
void movsx(RegisterX64 lhs, OperandX64 rhs);
void movzx(RegisterX64 lhs, OperandX64 rhs);
// Base one operand instruction with 2 opcode selection
void div(OperandX64 op);
void idiv(OperandX64 op);
void mul(OperandX64 op);
void imul(OperandX64 op);
void neg(OperandX64 op);
void not_(OperandX64 op);
// Additional forms of imul
void imul(OperandX64 lhs, OperandX64 rhs);
void imul(OperandX64 dst, OperandX64 lhs, int32_t rhs);
void test(OperandX64 lhs, OperandX64 rhs);
void lea(OperandX64 lhs, OperandX64 rhs);
@ -76,6 +83,12 @@ public:
void vxorpd(OperandX64 dst, OperandX64 src1, OperandX64 src2);
void vcomisd(OperandX64 src1, OperandX64 src2);
void vucomisd(OperandX64 src1, OperandX64 src2);
void vcvttsd2si(OperandX64 dst, OperandX64 src);
void vcvtsi2sd(OperandX64 dst, OperandX64 src1, OperandX64 src2);
void vroundsd(OperandX64 dst, OperandX64 src1, OperandX64 src2, uint8_t mode);
void vsqrtpd(OperandX64 dst, OperandX64 src);
void vsqrtps(OperandX64 dst, OperandX64 src);
@ -105,6 +118,7 @@ public:
OperandX64 f32(float value);
OperandX64 f64(double value);
OperandX64 f32x4(float x, float y, float z, float w);
OperandX64 bytes(const void* ptr, size_t size, size_t align = 8);
// Resulting data and code that need to be copied over one after the other
// The *end* of 'data' has to be aligned to 16 bytes, this will also align 'code'
@ -130,6 +144,8 @@ private:
void placeAvx(const char* name, OperandX64 dst, OperandX64 src, uint8_t code, bool setW, uint8_t mode, uint8_t prefix);
void placeAvx(const char* name, OperandX64 dst, OperandX64 src, uint8_t code, uint8_t coderev, bool setW, uint8_t mode, uint8_t prefix);
void placeAvx(const char* name, OperandX64 dst, OperandX64 src1, OperandX64 src2, uint8_t code, bool setW, uint8_t mode, uint8_t prefix);
void placeAvx(
const char* name, OperandX64 dst, OperandX64 src1, OperandX64 src2, uint8_t imm8, uint8_t code, bool setW, uint8_t mode, uint8_t prefix);
// Instruction components
void placeRegAndModRegMem(OperandX64 lhs, OperandX64 rhs);
@ -157,6 +173,7 @@ private:
LUAU_NOINLINE void log(const char* opcode, OperandX64 op);
LUAU_NOINLINE void log(const char* opcode, OperandX64 op1, OperandX64 op2);
LUAU_NOINLINE void log(const char* opcode, OperandX64 op1, OperandX64 op2, OperandX64 op3);
LUAU_NOINLINE void log(const char* opcode, OperandX64 op1, OperandX64 op2, OperandX64 op3, OperandX64 op4);
LUAU_NOINLINE void log(Label label);
LUAU_NOINLINE void log(const char* opcode, Label label);
void log(OperandX64 op);
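
A minimal usage sketch of the newly declared encodings, mirroring the operand forms exercised by the unit tests further below; the include path, the Luau::CodeGen namespace, and register constants such as eax/rcx/xmm0 are assumptions based on how those tests drive the builder.

#include "Luau/AssemblyBuilderX64.h"

using namespace Luau::CodeGen;

void emitSample()
{
    AssemblyBuilderX64 build(/* logText= */ true);

    build.movzx(eax, byte[rcx]);         // zero-extend an 8-bit load
    build.movsx(rsi, word[r12]);         // sign-extend a 16-bit load
    build.imul(rcx, rdx);                // two-operand signed multiply
    build.imul(rcx, rdx, 8);             // three-operand form with an immediate
    build.vcvtsi2sd(xmm5, xmm10, ecx);   // integer -> double conversion
    build.vroundsd(xmm1, xmm2, xmm3, 5); // rounding with an imm8 mode
    build.vcvttsd2si(ecx, xmm0);         // truncating double -> integer conversion

    char arr[16] = "hello world!123";
    build.vmovupd(xmm5, build.bytes(arr, 16, 8)); // arbitrary constant data in the data section
    build.ret();

    build.finalize(); // emitted bytes are now in build.code, constants in build.data
}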

View File

@ -46,6 +46,44 @@ const unsigned AVX_F2 = 0b11;
const unsigned kMaxAlign = 16;
// Utility functions to correctly write data on big endian machines
#if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
#include <endian.h>
static void writeu32(uint8_t* target, uint32_t value)
{
value = htole32(value);
memcpy(target, &value, sizeof(value));
}
static void writeu64(uint8_t* target, uint64_t value)
{
value = htole64(value);
memcpy(target, &value, sizeof(value));
}
static void writef32(uint8_t* target, float value)
{
static_assert(sizeof(float) == sizeof(uint32_t), "type size must match to reinterpret data");
uint32_t data;
memcpy(&data, &value, sizeof(value));
writeu32(target, data);
}
static void writef64(uint8_t* target, double value)
{
static_assert(sizeof(double) == sizeof(uint64_t), "type size must match to reinterpret data");
uint64_t data;
memcpy(&data, &value, sizeof(value));
writeu64(target, data);
}
#else
#define writeu32(target, value) memcpy(target, &value, sizeof(value))
#define writeu64(target, value) memcpy(target, &value, sizeof(value))
#define writef32(target, value) memcpy(target, &value, sizeof(value))
#define writef64(target, value) memcpy(target, &value, sizeof(value))
#endif
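
The writeu32/writeu64/writef32/writef64 helpers above ensure that constants, immediates, and label fixups land in the output stream in little-endian order even on big-endian hosts; on little-endian hosts they reduce to a plain memcpy. A standalone sketch (not part of the builder) of the property the helpers guarantee, using a byte-by-byte store in place of htole32 + memcpy:

#include <cstdint>
#include <cstdio>

// Endian-neutral little-endian store: equivalent on any host to what writeu32
// produces via htole32 + memcpy on big-endian machines.
static void storeLE32(uint8_t* target, uint32_t value)
{
    target[0] = uint8_t(value);
    target[1] = uint8_t(value >> 8);
    target[2] = uint8_t(value >> 16);
    target[3] = uint8_t(value >> 24);
}

int main()
{
    uint8_t buf[4];
    storeLE32(buf, 0x12345678u);
    printf("%02x %02x %02x %02x\n", buf[0], buf[1], buf[2], buf[3]); // prints 78 56 34 12
    return 0;
}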
AssemblyBuilderX64::AssemblyBuilderX64(bool logText)
: logText(logText)
{
@ -195,6 +233,34 @@ void AssemblyBuilderX64::mov64(RegisterX64 lhs, int64_t imm)
commit();
}
void AssemblyBuilderX64::movsx(RegisterX64 lhs, OperandX64 rhs)
{
if (logText)
log("movsx", lhs, rhs);
LUAU_ASSERT(rhs.memSize == SizeX64::byte || rhs.memSize == SizeX64::word);
placeRex(lhs, rhs);
place(0x0f);
place(rhs.memSize == SizeX64::byte ? 0xbe : 0xbf);
placeRegAndModRegMem(lhs, rhs);
commit();
}
void AssemblyBuilderX64::movzx(RegisterX64 lhs, OperandX64 rhs)
{
if (logText)
log("movzx", lhs, rhs);
LUAU_ASSERT(rhs.memSize == SizeX64::byte || rhs.memSize == SizeX64::word);
placeRex(lhs, rhs);
place(0x0f);
place(rhs.memSize == SizeX64::byte ? 0xb6 : 0xb7);
placeRegAndModRegMem(lhs, rhs);
commit();
}
void AssemblyBuilderX64::div(OperandX64 op)
{
placeUnaryModRegMem("div", op, 0xf6, 0xf7, 6);
@ -210,6 +276,11 @@ void AssemblyBuilderX64::mul(OperandX64 op)
placeUnaryModRegMem("mul", op, 0xf6, 0xf7, 4);
}
void AssemblyBuilderX64::imul(OperandX64 op)
{
placeUnaryModRegMem("imul", op, 0xf6, 0xf7, 5);
}
void AssemblyBuilderX64::neg(OperandX64 op)
{
placeUnaryModRegMem("neg", op, 0xf6, 0xf7, 3);
@ -220,6 +291,41 @@ void AssemblyBuilderX64::not_(OperandX64 op)
placeUnaryModRegMem("not", op, 0xf6, 0xf7, 2);
}
void AssemblyBuilderX64::imul(OperandX64 lhs, OperandX64 rhs)
{
if (logText)
log("imul", lhs, rhs);
placeRex(lhs.base, rhs);
place(0x0f);
place(0xaf);
placeRegAndModRegMem(lhs, rhs);
commit();
}
void AssemblyBuilderX64::imul(OperandX64 dst, OperandX64 lhs, int32_t rhs)
{
if (logText)
log("imul", dst, lhs, rhs);
placeRex(dst.base, lhs);
if (int8_t(rhs) == rhs)
{
place(0x6b);
placeRegAndModRegMem(dst, lhs);
placeImm8(rhs);
}
else
{
place(0x69);
placeRegAndModRegMem(dst, lhs);
placeImm32(rhs);
}
commit();
}
void AssemblyBuilderX64::test(OperandX64 lhs, OperandX64 rhs)
{
// No forms for r/m*, imm8 and reg, r/m*
@ -368,6 +474,26 @@ void AssemblyBuilderX64::vcomisd(OperandX64 src1, OperandX64 src2)
placeAvx("vcomisd", src1, src2, 0x2f, false, AVX_0F, AVX_66);
}
void AssemblyBuilderX64::vucomisd(OperandX64 src1, OperandX64 src2)
{
placeAvx("vucomisd", src1, src2, 0x2e, false, AVX_0F, AVX_66);
}
void AssemblyBuilderX64::vcvttsd2si(OperandX64 dst, OperandX64 src)
{
placeAvx("vcvttsd2si", dst, src, 0x2c, dst.base.size == SizeX64::dword, AVX_0F, AVX_F2);
}
void AssemblyBuilderX64::vcvtsi2sd(OperandX64 dst, OperandX64 src1, OperandX64 src2)
{
placeAvx("vcvtsi2sd", dst, src1, src2, 0x2a, (src2.cat == CategoryX64::reg ? src2.base.size : src2.memSize) == SizeX64::dword, AVX_0F, AVX_F2);
}
void AssemblyBuilderX64::vroundsd(OperandX64 dst, OperandX64 src1, OperandX64 src2, uint8_t mode)
{
placeAvx("vroundsd", dst, src1, src2, mode, 0x0b, false, AVX_0F3A, AVX_66);
}
void AssemblyBuilderX64::vsqrtpd(OperandX64 dst, OperandX64 src)
{
placeAvx("vsqrtpd", dst, src, 0x51, false, AVX_0F, AVX_66);
@ -436,7 +562,7 @@ void AssemblyBuilderX64::finalize()
for (Label fixup : pendingLabels)
{
uint32_t value = labelLocations[fixup.id - 1] - (fixup.location + 4);
memcpy(&code[fixup.location], &value, sizeof(value));
writeu32(&code[fixup.location], value);
}
size_t dataSize = data.size() - dataPos;
@ -479,34 +605,41 @@ void AssemblyBuilderX64::setLabel(Label& label)
OperandX64 AssemblyBuilderX64::i64(int64_t value)
{
size_t pos = allocateData(8, 8);
memcpy(&data[pos], &value, sizeof(value));
writeu64(&data[pos], value);
return OperandX64(SizeX64::qword, noreg, 1, rip, int32_t(pos - data.size()));
}
OperandX64 AssemblyBuilderX64::f32(float value)
{
size_t pos = allocateData(4, 4);
memcpy(&data[pos], &value, sizeof(value));
writef32(&data[pos], value);
return OperandX64(SizeX64::dword, noreg, 1, rip, int32_t(pos - data.size()));
}
OperandX64 AssemblyBuilderX64::f64(double value)
{
size_t pos = allocateData(8, 8);
memcpy(&data[pos], &value, sizeof(value));
writef64(&data[pos], value);
return OperandX64(SizeX64::qword, noreg, 1, rip, int32_t(pos - data.size()));
}
OperandX64 AssemblyBuilderX64::f32x4(float x, float y, float z, float w)
{
size_t pos = allocateData(16, 16);
memcpy(&data[pos], &x, sizeof(x));
memcpy(&data[pos + 4], &y, sizeof(y));
memcpy(&data[pos + 8], &z, sizeof(z));
memcpy(&data[pos + 12], &w, sizeof(w));
writef32(&data[pos], x);
writef32(&data[pos + 4], y);
writef32(&data[pos + 8], z);
writef32(&data[pos + 12], w);
return OperandX64(SizeX64::xmmword, noreg, 1, rip, int32_t(pos - data.size()));
}
OperandX64 AssemblyBuilderX64::bytes(const void* ptr, size_t size, size_t align)
{
size_t pos = allocateData(size, align);
memcpy(&data[pos], ptr, size);
return OperandX64(SizeX64::qword, noreg, 1, rip, int32_t(pos - data.size()));
}
void AssemblyBuilderX64::placeBinary(const char* name, OperandX64 lhs, OperandX64 rhs, uint8_t codeimm8, uint8_t codeimm, uint8_t codeimmImm8,
uint8_t code8rev, uint8_t coderev, uint8_t code8, uint8_t code, uint8_t opreg)
{
@ -700,6 +833,24 @@ void AssemblyBuilderX64::placeAvx(
commit();
}
void AssemblyBuilderX64::placeAvx(
const char* name, OperandX64 dst, OperandX64 src1, OperandX64 src2, uint8_t imm8, uint8_t code, bool setW, uint8_t mode, uint8_t prefix)
{
LUAU_ASSERT(dst.cat == CategoryX64::reg);
LUAU_ASSERT(src1.cat == CategoryX64::reg);
LUAU_ASSERT(src2.cat == CategoryX64::reg || src2.cat == CategoryX64::mem);
if (logText)
log(name, dst, src1, src2, imm8);
placeVex(dst, src1, src2, setW, mode, prefix);
place(code);
placeRegAndModRegMem(dst, src2);
placeImm8(imm8);
commit();
}
void AssemblyBuilderX64::placeRex(RegisterX64 op)
{
uint8_t code = REX_W(op.size == SizeX64::qword) | REX_B(op);
@ -861,16 +1012,18 @@ void AssemblyBuilderX64::placeImm8(int32_t imm)
void AssemblyBuilderX64::placeImm32(int32_t imm)
{
LUAU_ASSERT(codePos + sizeof(imm) < codeEnd);
memcpy(codePos, &imm, sizeof(imm));
codePos += sizeof(imm);
uint8_t* pos = codePos;
LUAU_ASSERT(pos + sizeof(imm) < codeEnd);
writeu32(pos, imm);
codePos = pos + sizeof(imm);
}
void AssemblyBuilderX64::placeImm64(int64_t imm)
{
LUAU_ASSERT(codePos + sizeof(imm) < codeEnd);
memcpy(codePos, &imm, sizeof(imm));
codePos += sizeof(imm);
uint8_t* pos = codePos;
LUAU_ASSERT(pos + sizeof(imm) < codeEnd);
writeu64(pos, imm);
codePos = pos + sizeof(imm);
}
void AssemblyBuilderX64::placeLabel(Label& label)
@ -970,6 +1123,19 @@ void AssemblyBuilderX64::log(const char* opcode, OperandX64 op1, OperandX64 op2,
text.append("\n");
}
void AssemblyBuilderX64::log(const char* opcode, OperandX64 op1, OperandX64 op2, OperandX64 op3, OperandX64 op4)
{
logAppend(" %-12s", opcode);
log(op1);
text.append(",");
log(op2);
text.append(",");
log(op3);
text.append(",");
log(op4);
text.append("\n");
}
void AssemblyBuilderX64::log(Label label)
{
logAppend(".L%d:\n", label.id);

View File

@ -28,6 +28,8 @@ LUAU_FASTFLAGVARIABLE(LuauCompileNoIpairs, false)
LUAU_FASTFLAGVARIABLE(LuauCompileFreeReassign, false)
LUAU_FASTFLAGVARIABLE(LuauCompileXEQ, false)
LUAU_FASTFLAGVARIABLE(LuauCompileOptimalAssignment, false)
namespace Luau
{
@ -37,6 +39,8 @@ static const uint32_t kMaxRegisterCount = 255;
static const uint32_t kMaxUpvalueCount = 200;
static const uint32_t kMaxLocalCount = 200;
static const uint8_t kInvalidReg = 255;
CompileError::CompileError(const Location& location, const std::string& message)
: location(location)
, message(message)
@ -2030,9 +2034,35 @@ struct Compiler
return reg;
}
// initializes target..target+targetCount-1 range using expression
// if expression is a call/vararg, we assume it returns all values, otherwise we fill the rest with nil
// assumes target register range can be clobbered and is at the top of the register space if targetTop = true
void compileExprTempN(AstExpr* node, uint8_t target, uint8_t targetCount, bool targetTop)
{
// we assume that target range is at the top of the register space and can be clobbered
// this is what allows us to compile the last call expression - if it's a call - using targetTop=true
LUAU_ASSERT(!targetTop || unsigned(target + targetCount) == regTop);
if (AstExprCall* expr = node->as<AstExprCall>())
{
compileExprCall(expr, target, targetCount, targetTop);
}
else if (AstExprVarargs* expr = node->as<AstExprVarargs>())
{
compileExprVarargs(expr, target, targetCount);
}
else
{
compileExprTemp(node, target);
for (size_t i = 1; i < targetCount; ++i)
bytecode.emitABC(LOP_LOADNIL, uint8_t(target + i), 0, 0);
}
}
// initializes target..target+targetCount-1 range using expressions from the list
// if list has fewer expressions, and last expression is a call, we assume the call returns the rest of the values
// if list has fewer expressions, and last expression isn't a call, we fill the rest with nil
// if list has fewer expressions, and last expression is multret, we assume it returns the rest of the values
// if list has fewer expressions, and last expression isn't multret, we fill the rest with nil
// assumes target register range can be clobbered and is at the top of the register space if targetTop = true
void compileExprListTemp(const AstArray<AstExpr*>& list, uint8_t target, uint8_t targetCount, bool targetTop)
{
@ -2062,23 +2092,7 @@ struct Compiler
for (size_t i = 0; i < list.size - 1; ++i)
compileExprTemp(list.data[i], uint8_t(target + i));
AstExpr* last = list.data[list.size - 1];
if (AstExprCall* expr = last->as<AstExprCall>())
{
compileExprCall(expr, uint8_t(target + list.size - 1), uint8_t(targetCount - (list.size - 1)), targetTop);
}
else if (AstExprVarargs* expr = last->as<AstExprVarargs>())
{
compileExprVarargs(expr, uint8_t(target + list.size - 1), uint8_t(targetCount - (list.size - 1)));
}
else
{
compileExprTemp(last, uint8_t(target + list.size - 1));
for (size_t i = list.size; i < targetCount; ++i)
bytecode.emitABC(LOP_LOADNIL, uint8_t(target + i), 0, 0);
}
compileExprTempN(list.data[list.size - 1], uint8_t(target + list.size - 1), uint8_t(targetCount - (list.size - 1)), targetTop);
}
else
{
@ -2859,6 +2873,8 @@ struct Compiler
void resolveAssignConflicts(AstStat* stat, std::vector<LValue>& vars)
{
LUAU_ASSERT(!FFlag::LuauCompileOptimalAssignment);
// regsUsed[i] is true if we have assigned the register during earlier assignments
// regsRemap[i] is set to the register where the original (pre-assignment) copy was made
// note: regsRemap is uninitialized intentionally to speed small assignments up; regsRemap[i] is valid iff regsUsed[i]
@ -2911,12 +2927,86 @@ struct Compiler
}
}
struct Assignment
{
LValue lvalue;
uint8_t conflictReg = kInvalidReg;
uint8_t valueReg = kInvalidReg;
};
void resolveAssignConflicts(AstStat* stat, std::vector<Assignment>& vars, const AstArray<AstExpr*>& values)
{
struct Visitor : AstVisitor
{
Compiler* self;
std::bitset<256> conflict;
std::bitset<256> assigned;
Visitor(Compiler* self)
: self(self)
{
}
bool visit(AstExprLocal* node) override
{
int reg = self->getLocalReg(node->local);
if (reg >= 0 && assigned[reg])
conflict[reg] = true;
return true;
}
};
Visitor visitor(this);
// mark any registers that are used *after* assignment as conflicting
for (size_t i = 0; i < vars.size(); ++i)
{
const LValue& li = vars[i].lvalue;
if (i < values.size)
values.data[i]->visit(&visitor);
if (li.kind == LValue::Kind_Local)
visitor.assigned[li.reg] = true;
}
// mark any registers used in trailing expressions as conflicting as well
for (size_t i = vars.size(); i < values.size; ++i)
values.data[i]->visit(&visitor);
// mark any registers used on left hand side that are also assigned anywhere as conflicting
// this is order-independent because we evaluate all right hand side arguments into registers before doing table assignments
for (const Assignment& var : vars)
{
const LValue& li = var.lvalue;
if ((li.kind == LValue::Kind_IndexName || li.kind == LValue::Kind_IndexNumber || li.kind == LValue::Kind_IndexExpr) &&
visitor.assigned[li.reg])
visitor.conflict[li.reg] = true;
if (li.kind == LValue::Kind_IndexExpr && visitor.assigned[li.index])
visitor.conflict[li.index] = true;
}
// for any conflicting var, we need to allocate a temporary register where the assignment is performed, so that we can move the value later
for (Assignment& var : vars)
{
const LValue& li = var.lvalue;
if (li.kind == LValue::Kind_Local && visitor.conflict[li.reg])
var.conflictReg = allocReg(stat, 1);
}
}
void compileStatAssign(AstStatAssign* stat)
{
RegScope rs(this);
// Optimization: one to one assignments don't require complex conflict resolution machinery and allow us to skip temporary registers for
// locals
// Optimization: one to one assignments don't require complex conflict resolution machinery
if (stat->vars.size == 1 && stat->values.size == 1)
{
LValue var = compileLValue(stat->vars.data[0], rs);
@ -2936,28 +3026,110 @@ struct Compiler
return;
}
// compute all l-values: note that this doesn't assign anything yet but it allocates registers and computes complex expressions on the left
// hand side for example, in "a[expr] = foo" expr will get evaluated here
std::vector<LValue> vars(stat->vars.size);
for (size_t i = 0; i < stat->vars.size; ++i)
vars[i] = compileLValue(stat->vars.data[i], rs);
// perform conflict resolution: if any lvalue refers to a local reg that will be reassigned before that, we save the local variable in a
// temporary reg
resolveAssignConflicts(stat, vars);
// compute values into temporaries
uint8_t regs = allocReg(stat, unsigned(stat->vars.size));
compileExprListTemp(stat->values, regs, uint8_t(stat->vars.size), /* targetTop= */ true);
// assign variables that have associated values; note that if we have fewer values than variables, we'll assign nil because
// compileExprListTemp will generate nils
for (size_t i = 0; i < stat->vars.size; ++i)
if (FFlag::LuauCompileOptimalAssignment)
{
setDebugLine(stat->vars.data[i]);
compileAssign(vars[i], uint8_t(regs + i));
// compute all l-values: note that this doesn't assign anything yet but it allocates registers and computes complex expressions on the
// left hand side - for example, in "a[expr] = foo" expr will get evaluated here
std::vector<Assignment> vars(stat->vars.size);
for (size_t i = 0; i < stat->vars.size; ++i)
vars[i].lvalue = compileLValue(stat->vars.data[i], rs);
// perform conflict resolution: if any expression refers to a local that is assigned before evaluating it, we assign to a temporary
// register after this, vars[i].conflictReg is set for locals that need to be assigned in the second pass
resolveAssignConflicts(stat, vars, stat->values);
// compute rhs into (mostly) fresh registers
// note that when the lhs assignment is a local, we evaluate directly into that register
// this is possible because resolveAssignConflicts renamed conflicting locals into temporaries
// after this, vars[i].valueReg is set to a register with the value for *all* vars, but some have already been assigned
for (size_t i = 0; i < stat->vars.size && i < stat->values.size; ++i)
{
AstExpr* value = stat->values.data[i];
if (i + 1 == stat->values.size && stat->vars.size > stat->values.size)
{
// allocate a consecutive range of regs for all remaining vars and compute everything into temps
// note, this also handles trailing nils
uint8_t rest = uint8_t(stat->vars.size - stat->values.size + 1);
uint8_t temp = allocReg(stat, rest);
compileExprTempN(value, temp, rest, /* targetTop= */ true);
for (size_t j = i; j < stat->vars.size; ++j)
vars[j].valueReg = uint8_t(temp + (j - i));
}
else
{
Assignment& var = vars[i];
// if target is a local, use compileExpr directly to target
if (var.lvalue.kind == LValue::Kind_Local)
{
var.valueReg = (var.conflictReg == kInvalidReg) ? var.lvalue.reg : var.conflictReg;
compileExpr(stat->values.data[i], var.valueReg);
}
else
{
var.valueReg = compileExprAuto(stat->values.data[i], rs);
}
}
}
// compute expressions with side effects for lulz
for (size_t i = stat->vars.size; i < stat->values.size; ++i)
{
RegScope rsi(this);
compileExprAuto(stat->values.data[i], rsi);
}
// almost done... let's assign everything left to right, noting that locals were either written-to directly, or will be written-to in a
// separate pass to avoid conflicts
for (const Assignment& var : vars)
{
LUAU_ASSERT(var.valueReg != kInvalidReg);
if (var.lvalue.kind != LValue::Kind_Local)
{
setDebugLine(var.lvalue.location);
compileAssign(var.lvalue, var.valueReg);
}
}
// all regular local writes are done by the prior loops by computing result directly into target, so this just handles conflicts OR
// local copies from temporary registers in multret context, since in that case we have to allocate consecutive temporaries
for (const Assignment& var : vars)
{
if (var.lvalue.kind == LValue::Kind_Local && var.valueReg != var.lvalue.reg)
bytecode.emitABC(LOP_MOVE, var.lvalue.reg, var.valueReg, 0);
}
}
else
{
// compute all l-values: note that this doesn't assign anything yet but it allocates registers and computes complex expressions on the
// left hand side for example, in "a[expr] = foo" expr will get evaluated here
std::vector<LValue> vars(stat->vars.size);
for (size_t i = 0; i < stat->vars.size; ++i)
vars[i] = compileLValue(stat->vars.data[i], rs);
// perform conflict resolution: if any lvalue refers to a local reg that will be reassigned before that, we save the local variable in a
// temporary reg
resolveAssignConflicts(stat, vars);
// compute values into temporaries
uint8_t regs = allocReg(stat, unsigned(stat->vars.size));
compileExprListTemp(stat->values, regs, uint8_t(stat->vars.size), /* targetTop= */ true);
// assign variables that have associated values; note that if we have fewer values than variables, we'll assign nil because
// compileExprListTemp will generate nils
for (size_t i = 0; i < stat->vars.size; ++i)
{
setDebugLine(stat->vars.data[i]);
compileAssign(vars[i], uint8_t(regs + i));
}
}
}
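
The new resolveAssignConflicts above boils down to two bitsets: registers that have already been assigned during the left-to-right sweep, and registers whose old value is still read after being assigned; the latter must be redirected through a temporary. A standalone sketch of that idea on a toy register model (the ToyAssign type is hypothetical, not the compiler's data structures):

#include <bitset>
#include <cstdio>
#include <vector>

// Toy model of one multiple-assignment statement: each entry assigns a local
// "register" and lists the registers its right-hand side reads.
struct ToyAssign
{
    int targetReg;
    std::vector<int> readsRegs;
    bool needsTemp = false;
};

int main()
{
    // models: local a, b = ...   a, b = b, a   (a is R0, b is R1)
    std::vector<ToyAssign> assigns = {{0, {1}}, {1, {0}}};

    std::bitset<256> assigned, conflict;

    // left-to-right sweep: a register read after it has already been assigned
    // would observe the new value, so its assignment must go through a temp
    for (ToyAssign& a : assigns)
    {
        for (int r : a.readsRegs)
            if (assigned[r])
                conflict[r] = true;

        assigned[a.targetReg] = true;
    }

    for (ToyAssign& a : assigns)
        a.needsTemp = conflict[a.targetReg];

    for (const ToyAssign& a : assigns)
        printf("R%d %s\n", a.targetReg, a.needsTemp ? "-> evaluate into a temp, move later" : "-> evaluate directly into the register");

    // prints: R0 -> evaluate into a temp, move later
    //         R1 -> evaluate directly into the register
    // matching the MOVE R2 R1 / MOVE R1 R0 / MOVE R0 R2 sequence in the swap test below
    return 0;
}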

View File

@ -97,8 +97,8 @@ ifeq ($(config),fuzz)
LDFLAGS+=-fsanitize=address,fuzzer
endif
ifneq ($(CALLGRIND),)
CXXFLAGS+=-DCALLGRIND=$(CALLGRIND)
ifeq ($(config),profile)
CXXFLAGS+=-O2 -DNDEBUG -gdwarf-4 -DCALLGRIND=1
endif
# target-specific flags

View File

@ -347,3 +347,11 @@ if(TARGET Luau.Web)
target_sources(Luau.Web PRIVATE
CLI/Web.cpp)
endif()
if(TARGET Luau.Reduce.CLI)
target_sources(Luau.Reduce.CLI PRIVATE
CLI/Reduce.cpp
CLI/FileUtils.cpp
CLI/FileUtils.h
)
endif()

View File

@ -12,9 +12,6 @@ import json
from color import colored, Color
from tabulate import TablePrinter, Alignment
# Based on rotest, specialized for benchmark results
import influxbench
try:
import matplotlib
import matplotlib.pyplot as plt
@ -721,6 +718,7 @@ def run(args, argsubcb):
argumentSubstituionCallback = argsubcb
if arguments.report_metrics or arguments.print_influx_debugging:
import influxbench
influxReporter = influxbench.InfluxReporter(arguments)
else:
influxReporter = None

View File

@ -4,12 +4,7 @@ import platform
import shlex
import socket
import sys
try:
import requests
except:
print("Please install 'requests' using using '{} -m pip install requests' command and try again".format(sys.executable))
exit(-1)
import requests
_hostname = socket.gethostname()

View File

@ -22,7 +22,7 @@ std::string bytecodeAsArray(const std::vector<uint8_t>& bytecode)
class AssemblyBuilderX64Fixture
{
public:
void check(std::function<void(AssemblyBuilderX64& build)> f, std::vector<uint8_t> result)
void check(std::function<void(AssemblyBuilderX64& build)> f, std::vector<uint8_t> code, std::vector<uint8_t> data = {})
{
AssemblyBuilderX64 build(/* logText= */ false);
@ -30,9 +30,15 @@ public:
build.finalize();
if (build.code != result)
if (build.code != code)
{
printf("Expected: %s\nReceived: %s\n", bytecodeAsArray(result).c_str(), bytecodeAsArray(build.code).c_str());
printf("Expected code: %s\nReceived code: %s\n", bytecodeAsArray(code).c_str(), bytecodeAsArray(build.code).c_str());
CHECK(false);
}
if (build.data != data)
{
printf("Expected data: %s\nReceived data: %s\n", bytecodeAsArray(data).c_str(), bytecodeAsArray(build.data).c_str());
CHECK(false);
}
}
@ -169,6 +175,7 @@ TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "BaseUnaryInstructionForms")
SINGLE_COMPARE(div(rcx), 0x48, 0xf7, 0xf1);
SINGLE_COMPARE(idiv(qword[rax]), 0x48, 0xf7, 0x38);
SINGLE_COMPARE(mul(qword[rax + rbx]), 0x48, 0xf7, 0x24, 0x18);
SINGLE_COMPARE(imul(r9), 0x49, 0xf7, 0xe9);
SINGLE_COMPARE(neg(r9), 0x49, 0xf7, 0xd9);
SINGLE_COMPARE(not_(r12), 0x49, 0xf7, 0xd4);
}
@ -191,6 +198,18 @@ TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "FormsOfMov")
SINGLE_COMPARE(mov(byte[rsi], al), 0x88, 0x06);
}
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "FormsOfMovExtended")
{
SINGLE_COMPARE(movsx(eax, byte[rcx]), 0x0f, 0xbe, 0x01);
SINGLE_COMPARE(movsx(r12, byte[r10]), 0x4d, 0x0f, 0xbe, 0x22);
SINGLE_COMPARE(movsx(ebx, word[r11]), 0x41, 0x0f, 0xbf, 0x1b);
SINGLE_COMPARE(movsx(rdx, word[rcx]), 0x48, 0x0f, 0xbf, 0x11);
SINGLE_COMPARE(movzx(eax, byte[rcx]), 0x0f, 0xb6, 0x01);
SINGLE_COMPARE(movzx(r12, byte[r10]), 0x4d, 0x0f, 0xb6, 0x22);
SINGLE_COMPARE(movzx(ebx, word[r11]), 0x41, 0x0f, 0xb7, 0x1b);
SINGLE_COMPARE(movzx(rdx, word[rcx]), 0x48, 0x0f, 0xb7, 0x11);
}
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "FormsOfTest")
{
SINGLE_COMPARE(test(al, 8), 0xf6, 0xc0, 0x08);
@ -230,6 +249,19 @@ TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "FormsOfAbsoluteJumps")
SINGLE_COMPARE(call(qword[r14 + rdx * 4]), 0x49, 0xff, 0x14, 0x96);
}
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "FormsOfImul")
{
SINGLE_COMPARE(imul(ecx, esi), 0x0f, 0xaf, 0xce);
SINGLE_COMPARE(imul(r12, rax), 0x4c, 0x0f, 0xaf, 0xe0);
SINGLE_COMPARE(imul(r12, qword[rdx + rdi]), 0x4c, 0x0f, 0xaf, 0x24, 0x3a);
SINGLE_COMPARE(imul(ecx, edx, 8), 0x6b, 0xca, 0x08);
SINGLE_COMPARE(imul(ecx, r9d, 0xabcd), 0x41, 0x69, 0xc9, 0xcd, 0xab, 0x00, 0x00);
SINGLE_COMPARE(imul(r8d, eax, -9), 0x44, 0x6b, 0xc0, 0xf7);
SINGLE_COMPARE(imul(rcx, rdx, 17), 0x48, 0x6b, 0xca, 0x11);
SINGLE_COMPARE(imul(rcx, r12, 0xabcd), 0x49, 0x69, 0xcc, 0xcd, 0xab, 0x00, 0x00);
SINGLE_COMPARE(imul(r12, rax, -13), 0x4c, 0x6b, 0xe0, 0xf3);
}
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "ControlFlow")
{
// Jump back
@ -335,6 +367,7 @@ TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "AVXUnaryMergeInstructionForms")
// Coverage for other instructions that follow the same pattern
SINGLE_COMPARE(vcomisd(xmm8, xmm10), 0xc4, 0x41, 0xf9, 0x2f, 0xc2);
SINGLE_COMPARE(vucomisd(xmm1, xmm4), 0xc4, 0xe1, 0xf9, 0x2e, 0xcc);
}
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "AVXMoveInstructionForms")
@ -359,6 +392,25 @@ TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "AVXMoveInstructionForms")
SINGLE_COMPARE(vmovups(ymm8, ymmword[r9]), 0xc4, 0x41, 0xfc, 0x10, 0x01);
}
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "AVXConversionInstructionForms")
{
SINGLE_COMPARE(vcvttsd2si(ecx, xmm0), 0xc4, 0xe1, 0x7b, 0x2c, 0xc8);
SINGLE_COMPARE(vcvttsd2si(r9d, xmmword[rcx + rdx]), 0xc4, 0x61, 0x7b, 0x2c, 0x0c, 0x11);
SINGLE_COMPARE(vcvttsd2si(rdx, xmm0), 0xc4, 0xe1, 0xfb, 0x2c, 0xd0);
SINGLE_COMPARE(vcvttsd2si(r13, xmmword[rcx + rdx]), 0xc4, 0x61, 0xfb, 0x2c, 0x2c, 0x11);
SINGLE_COMPARE(vcvtsi2sd(xmm5, xmm10, ecx), 0xc4, 0xe1, 0x2b, 0x2a, 0xe9);
SINGLE_COMPARE(vcvtsi2sd(xmm6, xmm11, dword[rcx + rdx]), 0xc4, 0xe1, 0x23, 0x2a, 0x34, 0x11);
SINGLE_COMPARE(vcvtsi2sd(xmm5, xmm10, r13), 0xc4, 0xc1, 0xab, 0x2a, 0xed);
SINGLE_COMPARE(vcvtsi2sd(xmm6, xmm11, qword[rcx + rdx]), 0xc4, 0xe1, 0xa3, 0x2a, 0x34, 0x11);
}
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "AVXTernaryInstructionForms")
{
SINGLE_COMPARE(vroundsd(xmm7, xmm12, xmm3, 9), 0xc4, 0xe3, 0x99, 0x0b, 0xfb, 0x09);
SINGLE_COMPARE(vroundsd(xmm8, xmm13, xmmword[r13 + rdx], 9), 0xc4, 0x43, 0x91, 0x0b, 0x44, 0x15, 0x00, 0x09);
SINGLE_COMPARE(vroundsd(xmm9, xmm14, xmmword[rcx + r10], 1), 0xc4, 0x23, 0x89, 0x0b, 0x0c, 0x11, 0x01);
}
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "MiscInstructions")
{
SINGLE_COMPARE(int3(), 0xcc);
@ -386,6 +438,11 @@ TEST_CASE("LogTest")
build.neg(qword[rbp + r12 * 2]);
build.mov64(r10, 0x1234567812345678ll);
build.vmovapd(xmmword[rax], xmm11);
build.movzx(eax, byte[rcx]);
build.movsx(rsi, word[r12]);
build.imul(rcx, rdx);
build.imul(rcx, rdx, 8);
build.vroundsd(xmm1, xmm2, xmm3, 5);
build.pop(r12);
build.ret();
build.int3();
@ -409,6 +466,11 @@ TEST_CASE("LogTest")
neg qword ptr [rbp+r12*2]
mov r10,1234567812345678h
vmovapd xmmword ptr [rax],xmm11
movzx eax,byte ptr [rcx]
movsx rsi,word ptr [r12]
imul rcx,rdx
imul rcx,rdx,8
vroundsd xmm1,xmm2,xmm3,5
pop r12
ret
int3
@ -426,6 +488,8 @@ TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "Constants")
build.vmovss(xmm2, build.f32(1.0f));
build.vmovsd(xmm3, build.f64(1.0));
build.vmovaps(xmm4, build.f32x4(1.0f, 2.0f, 4.0f, 8.0f));
char arr[16] = "hello world!123";
build.vmovupd(xmm5, build.bytes(arr, 16, 8));
build.ret();
},
{
@ -434,7 +498,20 @@ TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "Constants")
0xc4, 0xe1, 0xfa, 0x10, 0x15, 0xe1, 0xff, 0xff, 0xff,
0xc4, 0xe1, 0xfb, 0x10, 0x1d, 0xcc, 0xff, 0xff, 0xff,
0xc4, 0xe1, 0xf8, 0x28, 0x25, 0xab, 0xff, 0xff, 0xff,
0xc4, 0xe1, 0xf9, 0x10, 0x2d, 0x92, 0xff, 0xff, 0xff,
0xc3
},
{
'h', 'e', 'l', 'l', 'o', ' ', 'w', 'o', 'r', 'l', 'd', '!', '1', '2', '3', 0x0,
0x00, 0x00, 0x80, 0x3f,
0x00, 0x00, 0x00, 0x40,
0x00, 0x00, 0x80, 0x40,
0x00, 0x00, 0x00, 0x41,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // padding to align f32x4
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0, 0x3f,
0x00, 0x00, 0x00, 0x00, // padding to align f64
0x00, 0x00, 0x80, 0x3f,
0x21, 0x43, 0x65, 0x87, 0x78, 0x56, 0x34, 0x12,
});
// clang-format on
}
@ -444,7 +521,7 @@ TEST_CASE("ConstantStorage")
AssemblyBuilderX64 build(/* logText= */ false);
for (int i = 0; i <= 3000; i++)
build.vaddss(xmm0, xmm0, build.f32(float(i)));
build.vaddss(xmm0, xmm0, build.f32(1.0f));
build.finalize();
@ -452,9 +529,10 @@ TEST_CASE("ConstantStorage")
for (int i = 0; i <= 3000; i++)
{
float v;
memcpy(&v, &build.data[build.data.size() - (i + 1) * sizeof(float)], sizeof(v));
LUAU_ASSERT(v == float(i));
LUAU_ASSERT(build.data[i * 4 + 0] == 0x00);
LUAU_ASSERT(build.data[i * 4 + 1] == 0x00);
LUAU_ASSERT(build.data[i * 4 + 2] == 0x80);
LUAU_ASSERT(build.data[i * 4 + 3] == 0x3f);
}
}

View File

@ -2728,46 +2728,44 @@ RETURN R0 0
TEST_CASE("AssignmentConflict")
{
ScopedFastFlag sff("LuauCompileOptimalAssignment", true);
// assignments are left to right
CHECK_EQ("\n" + compileFunction0("local a, b a, b = 1, 2"), R"(
LOADNIL R0
LOADNIL R1
LOADN R2 1
LOADN R3 2
MOVE R0 R2
MOVE R1 R3
LOADN R0 1
LOADN R1 2
RETURN R0 0
)");
// if assignment of a local invalidates a direct register reference in later assignments, the value is evacuated to a temp register
// if assignment of a local invalidates a direct register reference in later assignments, the value is assigned to a temp register first
CHECK_EQ("\n" + compileFunction0("local a a, a[1] = 1, 2"), R"(
LOADNIL R0
MOVE R1 R0
LOADN R2 1
LOADN R3 2
MOVE R0 R2
SETTABLEN R3 R1 1
LOADN R1 1
LOADN R2 2
SETTABLEN R2 R0 1
MOVE R0 R1
RETURN R0 0
)");
// note that this doesn't happen if the local assignment happens last naturally
CHECK_EQ("\n" + compileFunction0("local a a[1], a = 1, 2"), R"(
LOADNIL R0
LOADN R1 1
LOADN R2 2
SETTABLEN R1 R0 1
MOVE R0 R2
LOADN R2 1
LOADN R1 2
SETTABLEN R2 R0 1
MOVE R0 R1
RETURN R0 0
)");
// this will happen if assigned register is used in any table expression, including as an object...
CHECK_EQ("\n" + compileFunction0("local a a, a.foo = 1, 2"), R"(
LOADNIL R0
MOVE R1 R0
LOADN R2 1
LOADN R3 2
MOVE R0 R2
SETTABLEKS R3 R1 K0
LOADN R1 1
LOADN R2 2
SETTABLEKS R2 R0 K0
MOVE R0 R1
RETURN R0 0
)");
@ -2775,22 +2773,20 @@ RETURN R0 0
CHECK_EQ("\n" + compileFunction0("local a a, foo[a] = 1, 2"), R"(
LOADNIL R0
GETIMPORT R1 1
MOVE R2 R0
LOADN R3 1
LOADN R4 2
MOVE R0 R3
SETTABLE R4 R1 R2
LOADN R2 1
LOADN R3 2
SETTABLE R3 R1 R0
MOVE R0 R2
RETURN R0 0
)");
// ... or both ...
CHECK_EQ("\n" + compileFunction0("local a a, a[a] = 1, 2"), R"(
LOADNIL R0
MOVE R1 R0
LOADN R2 1
LOADN R3 2
MOVE R0 R2
SETTABLE R3 R1 R1
LOADN R1 1
LOADN R2 2
SETTABLE R2 R0 R0
MOVE R0 R1
RETURN R0 0
)");
@ -2798,14 +2794,12 @@ RETURN R0 0
CHECK_EQ("\n" + compileFunction0("local a, b a, b, a[b] = 1, 2, 3"), R"(
LOADNIL R0
LOADNIL R1
MOVE R2 R0
MOVE R3 R1
LOADN R4 1
LOADN R5 2
LOADN R6 3
MOVE R0 R4
MOVE R1 R5
SETTABLE R6 R2 R3
LOADN R2 1
LOADN R3 2
LOADN R4 3
SETTABLE R4 R0 R1
MOVE R0 R2
MOVE R1 R3
RETURN R0 0
)");
@ -2815,10 +2809,9 @@ RETURN R0 0
LOADNIL R0
GETIMPORT R1 1
ADDK R2 R0 K2
LOADN R3 1
LOADN R4 2
MOVE R0 R3
SETTABLE R4 R1 R2
LOADN R0 1
LOADN R3 2
SETTABLE R3 R1 R2
RETURN R0 0
)");
}
@ -6242,4 +6235,228 @@ RETURN R2 1
)");
}
TEST_CASE("MultipleAssignments")
{
ScopedFastFlag sff("LuauCompileOptimalAssignment", true);
// order of assignments is left to right
CHECK_EQ("\n" + compileFunction0(R"(
local a, b
a, b = f(1), f(2)
)"),
R"(
LOADNIL R0
LOADNIL R1
GETIMPORT R2 1
LOADN R3 1
CALL R2 1 1
MOVE R0 R2
GETIMPORT R2 1
LOADN R3 2
CALL R2 1 1
MOVE R1 R2
RETURN R0 0
)");
// this includes table assignments
CHECK_EQ("\n" + compileFunction0(R"(
local t
t[1], t[2] = 3, 4
)"),
R"(
LOADNIL R0
LOADNIL R1
LOADN R2 3
LOADN R3 4
SETTABLEN R2 R0 1
SETTABLEN R3 R1 2
RETURN R0 0
)");
// semantically, we evaluate the right hand side first; this allows us to e.g. swap elements in a table easily
CHECK_EQ("\n" + compileFunction0(R"(
local t = ...
t[1], t[2] = t[2], t[1]
)"),
R"(
GETVARARGS R0 1
GETTABLEN R1 R0 2
GETTABLEN R2 R0 1
SETTABLEN R1 R0 1
SETTABLEN R2 R0 2
RETURN R0 0
)");
// however, we need to optimize local assignments; to do this well, we need to handle assignment conflicts
// let's first go through a few cases where there are no conflicts:
// when multiple assignments have no conflicts (all local vars are read before being assigned), codegen is the same as a series of single
// assignments
CHECK_EQ("\n" + compileFunction0(R"(
local xm1, x, xp1, xi = ...
xm1,x,xp1,xi = x,xp1,xp1+1,xi-1
)"),
R"(
GETVARARGS R0 4
MOVE R0 R1
MOVE R1 R2
ADDK R2 R2 K0
SUBK R3 R3 K0
RETURN R0 0
)");
// similar example to above from a more complex case
CHECK_EQ("\n" + compileFunction0(R"(
local a, b, c, d, e, f, g, h, t1, t2 = ...
h, g, f, e, d, c, b, a = g, f, e, d + t1, c, b, a, t1 + t2
)"),
R"(
GETVARARGS R0 10
MOVE R7 R6
MOVE R6 R5
MOVE R5 R4
ADD R4 R3 R8
MOVE R3 R2
MOVE R2 R1
MOVE R1 R0
ADD R0 R8 R9
RETURN R0 0
)");
// when locals have a conflict, we assign temporaries instead of locals, and at the end copy the values back
// the basic example of this is a swap/rotate
CHECK_EQ("\n" + compileFunction0(R"(
local a, b = ...
a, b = b, a
)"),
R"(
GETVARARGS R0 2
MOVE R2 R1
MOVE R1 R0
MOVE R0 R2
RETURN R0 0
)");
CHECK_EQ("\n" + compileFunction0(R"(
local a, b, c = ...
a, b, c = c, a, b
)"),
R"(
GETVARARGS R0 3
MOVE R3 R2
MOVE R4 R0
MOVE R2 R1
MOVE R0 R3
MOVE R1 R4
RETURN R0 0
)");
CHECK_EQ("\n" + compileFunction0(R"(
local a, b, c = ...
a, b, c = b, c, a
)"),
R"(
GETVARARGS R0 3
MOVE R3 R1
MOVE R1 R2
MOVE R2 R0
MOVE R0 R3
RETURN R0 0
)");
// multiple assignments with multcall handling - foo() evaluates to temporary registers and they are copied out to target
CHECK_EQ("\n" + compileFunction0(R"(
local a, b, c, d = ...
a, b, c, d = 1, foo()
)"),
R"(
GETVARARGS R0 4
LOADN R0 1
GETIMPORT R4 1
CALL R4 0 3
MOVE R1 R4
MOVE R2 R5
MOVE R3 R6
RETURN R0 0
)");
// note that during this we still need to handle local reassignment, e.g. when table assignments are performed
CHECK_EQ("\n" + compileFunction0(R"(
local a, b, c, d = ...
a, b[a], c[d], d = 1, foo()
)"),
R"(
GETVARARGS R0 4
LOADN R4 1
GETIMPORT R6 1
CALL R6 0 3
SETTABLE R6 R1 R0
SETTABLE R7 R2 R3
MOVE R0 R4
MOVE R3 R8
RETURN R0 0
)");
// multiple assignments with multcall handling - foo evaluates to a single value, so all remaining locals are assigned nil
// note that here we don't assign the locals directly, as this case is very rare, so we use a code path similar to the above
CHECK_EQ("\n" + compileFunction0(R"(
local a, b, c, d = ...
a, b, c, d = 1, foo
)"),
R"(
GETVARARGS R0 4
LOADN R0 1
GETIMPORT R4 1
LOADNIL R5
LOADNIL R6
MOVE R1 R4
MOVE R2 R5
MOVE R3 R6
RETURN R0 0
)");
// note that we also try to use locals as a source of assignment directly when assigning fields; this works using the old local value when possible
CHECK_EQ("\n" + compileFunction0(R"(
local a, b = ...
a[1], a[2] = b, b + 1
)"),
R"(
GETVARARGS R0 2
ADDK R2 R1 K0
SETTABLEN R1 R0 1
SETTABLEN R2 R0 2
RETURN R0 0
)");
// ... of course if the local is reassigned, we defer the assignment until later
CHECK_EQ("\n" + compileFunction0(R"(
local a, b = ...
b, a[1] = 42, b
)"),
R"(
GETVARARGS R0 2
LOADN R2 42
SETTABLEN R1 R0 1
MOVE R1 R2
RETURN R0 0
)");
// when there are more expressions than variables, we evaluate them for their side effects, but they also participate in conflict handling
CHECK_EQ("\n" + compileFunction0(R"(
local a, b = ...
a, b = 1, 2, a + b
)"),
R"(
GETVARARGS R0 2
LOADN R2 1
LOADN R3 2
ADD R4 R0 R1
MOVE R0 R2
MOVE R1 R3
RETURN R0 0
)");
}
TEST_SUITE_END();

View File

@ -372,17 +372,25 @@ void Fixture::registerTestTypes()
void Fixture::dumpErrors(const CheckResult& cr)
{
dumpErrors(std::cout, cr.errors);
std::string error = getErrors(cr);
if (!error.empty())
MESSAGE(error);
}
void Fixture::dumpErrors(const ModulePtr& module)
{
dumpErrors(std::cout, module->errors);
std::stringstream ss;
dumpErrors(ss, module->errors);
if (!ss.str().empty())
MESSAGE(ss.str());
}
void Fixture::dumpErrors(const Module& module)
{
dumpErrors(std::cout, module.errors);
std::stringstream ss;
dumpErrors(ss, module.errors);
if (!ss.str().empty())
MESSAGE(ss.str());
}
std::string Fixture::getErrors(const CheckResult& cr)
@ -413,6 +421,7 @@ LoadDefinitionFileResult Fixture::loadDefinition(const std::string& source)
LoadDefinitionFileResult result = frontend.loadDefinitionFile(source, "@test");
freeze(typeChecker.globalTypes);
dumpErrors(result.module);
REQUIRE_MESSAGE(result.success, "loadDefinition: unable to load definition file");
return result;
}
@ -434,7 +443,7 @@ BuiltinsFixture::BuiltinsFixture(bool freeze, bool prepareAutocomplete)
ConstraintGraphBuilderFixture::ConstraintGraphBuilderFixture()
: Fixture()
, cgb(mainModuleName, &arena, NotNull(&ice), frontend.getGlobalScope())
, cgb(mainModuleName, getMainModule(), &arena, NotNull(&ice), frontend.getGlobalScope())
, forceTheFlag{"DebugLuauDeferredConstraintResolution", true}
{
BlockedTypeVar::nextIndex = 0;

View File

@ -1675,7 +1675,7 @@ TEST_CASE_FIXTURE(Fixture, "WrongCommentOptimize")
CHECK_EQ(result.warnings[3].text, "optimize directive uses unknown optimization level '100500', 0..2 expected");
}
TEST_CASE_FIXTURE(Fixture, "LintIntegerParsing")
TEST_CASE_FIXTURE(Fixture, "IntegerParsing")
{
ScopedFastFlag luauLintParseIntegerIssues{"LuauLintParseIntegerIssues", true};
@ -1690,7 +1690,7 @@ local _ = 0x10000000000000000
}
// TODO: remove with FFlagLuauErrorDoubleHexPrefix
TEST_CASE_FIXTURE(Fixture, "LintIntegerParsingDoublePrefix")
TEST_CASE_FIXTURE(Fixture, "IntegerParsingDoublePrefix")
{
ScopedFastFlag luauLintParseIntegerIssues{"LuauLintParseIntegerIssues", true};
ScopedFastFlag luauErrorDoubleHexPrefix{"LuauErrorDoubleHexPrefix", false}; // Lint will be available until we start rejecting code
@ -1707,4 +1707,36 @@ local _ = 0x0xffffffffffffffffffffffffffffffffff
"Hexadecimal number literal has a double prefix, which will fail to parse in the future; remove the extra 0x to fix");
}
TEST_CASE_FIXTURE(Fixture, "ComparisonPrecedence")
{
ScopedFastFlag sff("LuauLintComparisonPrecedence", true);
LintResult result = lint(R"(
local a, b = ...
local _ = not a == b
local _ = not a ~= b
local _ = not a <= b
local _ = a <= b == 0
local _ = not a == not b -- weird but ok
-- silence tests for all of the above
local _ = not (a == b)
local _ = (not a) == b
local _ = not (a ~= b)
local _ = (not a) ~= b
local _ = not (a <= b)
local _ = (not a) <= b
local _ = (a <= b) == 0
local _ = a <= (b == 0)
)");
REQUIRE_EQ(result.warnings.size(), 4);
CHECK_EQ(result.warnings[0].text, "not X == Y is equivalent to (not X) == Y; consider using X ~= Y, or wrap one of the expressions in parentheses to silence");
CHECK_EQ(result.warnings[1].text, "not X ~= Y is equivalent to (not X) ~= Y; consider using X == Y, or wrap one of the expressions in parentheses to silence");
CHECK_EQ(result.warnings[2].text, "not X <= Y is equivalent to (not X) <= Y; wrap one of the expressions in parentheses to silence");
CHECK_EQ(result.warnings[3].text, "X <= Y == Z is equivalent to (X <= Y) == Z; wrap one of the expressions in parentheses to silence");
}
TEST_SUITE_END();

View File

@ -317,4 +317,78 @@ type B = A
CHECK(toString(it->second.type) == "any");
}
TEST_CASE_FIXTURE(BuiltinsFixture, "do_not_clone_reexports")
{
ScopedFastFlag flags[] = {
{"LuauClonePublicInterfaceLess", true},
{"LuauSubstitutionReentrant", true},
{"LuauClassTypeVarsInSubstitution", true},
{"LuauSubstitutionFixMissingFields", true},
};
fileResolver.source["Module/A"] = R"(
export type A = {p : number}
return {}
)";
fileResolver.source["Module/B"] = R"(
local a = require(script.Parent.A)
export type B = {q : a.A}
return {}
)";
CheckResult result = frontend.check("Module/B");
LUAU_REQUIRE_NO_ERRORS(result);
ModulePtr modA = frontend.moduleResolver.getModule("Module/A");
ModulePtr modB = frontend.moduleResolver.getModule("Module/B");
REQUIRE(modA);
REQUIRE(modB);
auto modAiter = modA->getModuleScope()->exportedTypeBindings.find("A");
auto modBiter = modB->getModuleScope()->exportedTypeBindings.find("B");
REQUIRE(modAiter != modA->getModuleScope()->exportedTypeBindings.end());
REQUIRE(modBiter != modB->getModuleScope()->exportedTypeBindings.end());
TypeId typeA = modAiter->second.type;
TypeId typeB = modBiter->second.type;
TableTypeVar* tableB = getMutable<TableTypeVar>(typeB);
REQUIRE(tableB);
CHECK(typeA == tableB->props["q"].type);
}
TEST_CASE_FIXTURE(BuiltinsFixture, "do_not_clone_types_of_reexported_values")
{
ScopedFastFlag flags[] = {
{"LuauClonePublicInterfaceLess", true},
{"LuauSubstitutionReentrant", true},
{"LuauClassTypeVarsInSubstitution", true},
{"LuauSubstitutionFixMissingFields", true},
};
fileResolver.source["Module/A"] = R"(
local exports = {a={p=5}}
return exports
)";
fileResolver.source["Module/B"] = R"(
local a = require(script.Parent.A)
local exports = {b=a.a}
return exports
)";
CheckResult result = frontend.check("Module/B");
LUAU_REQUIRE_NO_ERRORS(result);
ModulePtr modA = frontend.moduleResolver.getModule("Module/A");
ModulePtr modB = frontend.moduleResolver.getModule("Module/B");
REQUIRE(modA);
REQUIRE(modB);
std::optional<TypeId> typeA = first(modA->getModuleScope()->returnType);
std::optional<TypeId> typeB = first(modB->getModuleScope()->returnType);
REQUIRE(typeA);
REQUIRE(typeB);
TableTypeVar* tableA = getMutable<TableTypeVar>(*typeA);
TableTypeVar* tableB = getMutable<TableTypeVar>(*typeB);
CHECK(tableA->props["a"].type == tableB->props["b"].type);
}
TEST_SUITE_END();

View File

@ -169,6 +169,7 @@ TEST_CASE_FIXTURE(Fixture, "table_props_are_any")
REQUIRE(ttv != nullptr);
REQUIRE(ttv->props.count("foo"));
TypeId fooProp = ttv->props["foo"].type;
REQUIRE(fooProp != nullptr);

View File

@ -11,6 +11,33 @@ using namespace Luau;
TEST_SUITE_BEGIN("DefinitionTests");
TEST_CASE_FIXTURE(Fixture, "definition_file_simple")
{
loadDefinition(R"(
declare foo: number
declare function bar(x: number): string
declare foo2: typeof(foo)
)");
TypeId globalFooTy = getGlobalBinding(frontend.typeChecker, "foo");
CHECK_EQ(toString(globalFooTy), "number");
TypeId globalBarTy = getGlobalBinding(frontend.typeChecker, "bar");
CHECK_EQ(toString(globalBarTy), "(number) -> string");
TypeId globalFoo2Ty = getGlobalBinding(frontend.typeChecker, "foo2");
CHECK_EQ(toString(globalFoo2Ty), "number");
CheckResult result = check(R"(
local x: number = foo - 1
local y: string = bar(x)
local z: number | string = x
z = y
)");
LUAU_REQUIRE_NO_ERRORS(result);
}
TEST_CASE_FIXTURE(Fixture, "definition_file_loading")
{
loadDefinition(R"(

View File

@ -845,6 +845,7 @@ TEST_CASE_FIXTURE(Fixture, "generic_table_method")
TableTypeVar* tTable = getMutable<TableTypeVar>(tType);
REQUIRE(tTable != nullptr);
REQUIRE(tTable->props.count("bar"));
TypeId barType = tTable->props["bar"].type;
REQUIRE(barType != nullptr);

View File

@ -398,8 +398,6 @@ caused by:
TEST_CASE_FIXTURE(BuiltinsFixture, "constrained_anyification_clone_immutable_types")
{
ScopedFastFlag luauAnyificationMustClone{"LuauAnyificationMustClone", true};
fileResolver.source["game/A"] = R"(
return function(...) end
)";

View File

@ -1847,6 +1847,7 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "quantifying_a_bound_var_works")
TypeId ty = requireType("clazz");
TableTypeVar* ttv = getMutable<TableTypeVar>(ty);
REQUIRE(ttv);
REQUIRE(ttv->props.count("new"));
Property& prop = ttv->props["new"];
REQUIRE(prop.type);
const FunctionTypeVar* ftv = get<FunctionTypeVar>(follow(prop.type));
@ -2516,6 +2517,7 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "dont_quantify_table_that_belongs_to_outer_sc
TableTypeVar* counterType = getMutable<TableTypeVar>(requireType("Counter"));
REQUIRE(counterType);
REQUIRE(counterType->props.count("new"));
const FunctionTypeVar* newType = get<FunctionTypeVar>(follow(counterType->props["new"].type));
REQUIRE(newType);
@ -3081,8 +3083,6 @@ TEST_CASE_FIXTURE(Fixture, "quantify_even_that_table_was_never_exported_at_all")
TEST_CASE_FIXTURE(BuiltinsFixture, "leaking_bad_metatable_errors")
{
ScopedFastFlag luauIndexSilenceErrors{"LuauIndexSilenceErrors", true};
CheckResult result = check(R"(
local a = setmetatable({}, 1)
local b = a.x

View File

@ -182,6 +182,17 @@ TEST_CASE_FIXTURE(Fixture, "UnionTypeVarIterator_with_empty_union")
CHECK(actual.empty());
}
TEST_CASE_FIXTURE(Fixture, "UnionTypeVarIterator_with_only_cyclic_union")
{
TypeVar tv{UnionTypeVar{}};
auto utv = getMutable<UnionTypeVar>(&tv);
utv->options.push_back(&tv);
utv->options.push_back(&tv);
std::vector<TypeId> actual(begin(utv), end(utv));
CHECK(actual.empty());
}
TEST_CASE_FIXTURE(Fixture, "substitution_skip_failure")
{
TypeVar ftv11{FreeTypeVar{TypeLevel{}}};

View File

@ -49,6 +49,12 @@ assert((function() _G.foo = 1 return _G['foo'] end)() == 1)
assert((function() _G['bar'] = 1 return _G.bar end)() == 1)
assert((function() local a = 1 (function () a = 2 end)() return a end)() == 2)
-- assignments with local conflicts
assert((function() local a, b = 1, {} a, b[a] = 43, -1 return a + b[1] end)() == 42)
assert((function() local a = {} local b = a a[1], a = 43, -1 return a + b[1] end)() == 42)
assert((function() local a, b = 1, {} a, b[a] = (function() return 43, -1 end)() return a + b[1] end)() == 42)
assert((function() local a = {} local b = a a[1], a = (function() return 43, -1 end)() return a + b[1] end)() == 42)
-- upvalues
assert((function() local a = 1 function foo() return a end return foo() end)() == 1)

View File

@ -295,7 +295,7 @@ end
-- testing syntax limits
local syntaxdepth = if limitedstack then 200 else 1000
local syntaxdepth = if limitedstack then 200 else 500
local function testrep (init, rep)
local s = "local a; "..init .. string.rep(rep, syntaxdepth)
local a,b = loadstring(s)

View File

@ -145,6 +145,14 @@ end) == false)
assert(string.format("%*", "a\0b\0c") == "a\0b\0c")
assert(string.format("%*", string.rep("doge", 3000)) == string.rep("doge", 3000))
assert(string.format("%*", 42) == "42")
assert(string.format("%*", true) == "true")
assert(string.format("%*", setmetatable({}, { __tostring = function() return "ok" end })) == "ok")
local ud = newproxy(true)
getmetatable(ud).__tostring = function() return "good" end
assert(string.format("%*", ud) == "good")
assert(pcall(function()
string.format("%#*", "bad form")

View File

@ -1,13 +1,8 @@
AnnotationTests.as_expr_does_not_propagate_type_info
AnnotationTests.as_expr_is_bidirectional
AnnotationTests.as_expr_warns_on_unrelated_cast
AnnotationTests.builtin_types_are_not_exported
AnnotationTests.cannot_use_nonexported_type
AnnotationTests.cloned_interface_maintains_pointers_between_definitions
AnnotationTests.define_generic_type_alias
AnnotationTests.duplicate_type_param_name
AnnotationTests.for_loop_counter_annotation_is_checked
AnnotationTests.function_return_annotations_are_checked
AnnotationTests.generic_aliases_are_cloned_properly
AnnotationTests.interface_types_belong_to_interface_arena
AnnotationTests.luau_ice_triggers_an_ice
@ -18,21 +13,14 @@ AnnotationTests.luau_print_is_magic_if_the_flag_is_set
AnnotationTests.luau_print_is_not_special_without_the_flag
AnnotationTests.occurs_check_on_cyclic_intersection_typevar
AnnotationTests.occurs_check_on_cyclic_union_typevar
AnnotationTests.self_referential_type_alias
AnnotationTests.too_many_type_params
AnnotationTests.two_type_params
AnnotationTests.type_annotations_inside_function_bodies
AnnotationTests.type_assertion_expr
AnnotationTests.unknown_type_reference_generates_error
AnnotationTests.use_type_required_from_another_file
AstQuery.last_argument_function_call_type
AstQuery::getDocumentationSymbolAtPosition.binding
AstQuery::getDocumentationSymbolAtPosition.event_callback_arg
AstQuery::getDocumentationSymbolAtPosition.overloaded_fn
AstQuery::getDocumentationSymbolAtPosition.prop
AutocompleteTest.argument_types
AutocompleteTest.arguments_to_global_lambda
AutocompleteTest.as_types
AutocompleteTest.autocomplete_boolean_singleton
AutocompleteTest.autocomplete_end_with_fn_exprs
AutocompleteTest.autocomplete_end_with_lambda
@ -127,7 +115,6 @@ BuiltinTests.assert_removes_falsy_types2
BuiltinTests.assert_removes_falsy_types_even_from_type_pack_tail_but_only_for_the_first_type
BuiltinTests.assert_returns_false_and_string_iff_it_knows_the_first_argument_cannot_be_truthy
BuiltinTests.bad_select_should_not_crash
BuiltinTests.builtin_tables_sealed
BuiltinTests.coroutine_resume_anything_goes
BuiltinTests.coroutine_wrap_anything_goes
BuiltinTests.debug_info_is_crazy
@ -136,28 +123,20 @@ BuiltinTests.dont_add_definitions_to_persistent_types
BuiltinTests.find_capture_types
BuiltinTests.find_capture_types2
BuiltinTests.find_capture_types3
BuiltinTests.gcinfo
BuiltinTests.getfenv
BuiltinTests.global_singleton_types_are_sealed
BuiltinTests.gmatch_capture_types
BuiltinTests.gmatch_capture_types2
BuiltinTests.gmatch_capture_types_balanced_escaped_parens
BuiltinTests.gmatch_capture_types_default_capture
BuiltinTests.gmatch_capture_types_invalid_pattern_fallback_to_builtin
BuiltinTests.gmatch_capture_types_invalid_pattern_fallback_to_builtin2
BuiltinTests.gmatch_capture_types_leading_end_bracket_is_part_of_set
BuiltinTests.gmatch_capture_types_parens_in_sets_are_ignored
BuiltinTests.gmatch_capture_types_set_containing_lbracket
BuiltinTests.gmatch_definition
BuiltinTests.ipairs_iterator_should_infer_types_and_type_check
BuiltinTests.lua_51_exported_globals_all_exist
BuiltinTests.match_capture_types
BuiltinTests.match_capture_types2
BuiltinTests.math_max_checks_for_numbers
BuiltinTests.math_max_variatic
BuiltinTests.math_things_are_defined
BuiltinTests.next_iterator_should_infer_types_and_type_check
BuiltinTests.no_persistent_typelevel_change
BuiltinTests.os_time_takes_optional_date_table
BuiltinTests.pairs_iterator_should_infer_types_and_type_check
BuiltinTests.see_thru_select
@ -170,7 +149,6 @@ BuiltinTests.select_with_variadic_typepack_tail
BuiltinTests.select_with_variadic_typepack_tail_and_string_head
BuiltinTests.set_metatable_needs_arguments
BuiltinTests.setmetatable_should_not_mutate_persisted_types
BuiltinTests.setmetatable_unpacks_arg_types_correctly
BuiltinTests.sort
BuiltinTests.sort_with_bad_predicate
BuiltinTests.sort_with_predicate
@ -179,6 +157,8 @@ BuiltinTests.string_format_arg_types_inference
BuiltinTests.string_format_as_method
BuiltinTests.string_format_correctly_ordered_types
BuiltinTests.string_format_report_all_type_errors_at_correct_positions
BuiltinTests.string_format_tostring_specifier
BuiltinTests.string_format_tostring_specifier_type_constraint
BuiltinTests.string_format_use_correct_argument
BuiltinTests.string_format_use_correct_argument2
BuiltinTests.string_lib_self_noself
@ -190,53 +170,38 @@ BuiltinTests.table_insert_correctly_infers_type_of_array_3_args_overload
BuiltinTests.table_pack
BuiltinTests.table_pack_reduce
BuiltinTests.table_pack_variadic
BuiltinTests.thread_is_a_type
BuiltinTests.tonumber_returns_optional_number_type
BuiltinTests.tonumber_returns_optional_number_type2
BuiltinTests.xpcall
DefinitionTests.class_definition_function_prop
DefinitionTests.declaring_generic_functions
DefinitionTests.definition_file_class_function_args
DefinitionTests.definition_file_classes
DefinitionTests.definition_file_loading
DefinitionTests.definitions_documentation_symbols
DefinitionTests.documentation_symbols_dont_attach_to_persistent_types
DefinitionTests.single_class_type_identity_in_global_types
FrontendTest.accumulate_cached_errors
FrontendTest.accumulate_cached_errors_in_consistent_order
FrontendTest.any_annotation_breaks_cycle
FrontendTest.ast_node_at_position
FrontendTest.automatically_check_cyclically_dependent_scripts
FrontendTest.automatically_check_dependent_scripts
FrontendTest.check_without_builtin_next
FrontendTest.clearStats
FrontendTest.cycle_detection_between_check_and_nocheck
FrontendTest.cycle_detection_disabled_in_nocheck
FrontendTest.cycle_error_paths
FrontendTest.cycle_errors_can_be_fixed
FrontendTest.cycle_incremental_type_surface
FrontendTest.cycle_incremental_type_surface_longer
FrontendTest.dont_recheck_script_that_hasnt_been_marked_dirty
FrontendTest.dont_reparse_clean_file_when_linting
FrontendTest.environments
FrontendTest.ignore_require_to_nonexistent_file
FrontendTest.imported_table_modification_2
FrontendTest.it_should_be_safe_to_stringify_errors_when_full_type_graph_is_discarded
FrontendTest.no_use_after_free_with_type_fun_instantiation
FrontendTest.nocheck_cycle_used_by_checked
FrontendTest.nocheck_modules_are_typed
FrontendTest.produce_errors_for_unchanged_file_with_a_syntax_error
FrontendTest.re_report_type_error_in_required_file
FrontendTest.recheck_if_dependent_script_is_dirty
FrontendTest.reexport_cyclic_type
FrontendTest.reexport_type_alias
FrontendTest.report_require_to_nonexistent_file
FrontendTest.report_syntax_error_in_required_file
FrontendTest.reports_errors_from_multiple_sources
FrontendTest.stats_are_not_reset_between_checks
FrontendTest.trace_requires_in_nonstrict_mode
GenericsTests.apply_type_function_nested_generics1
GenericsTests.apply_type_function_nested_generics2
GenericsTests.better_mismatch_error_messages
GenericsTests.bound_tables_do_not_clone_original_fields
GenericsTests.calling_self_generic_methods
GenericsTests.check_generic_typepack_function
GenericsTests.check_mutual_generic_functions
GenericsTests.correctly_instantiate_polymorphic_member_functions
@ -265,8 +230,7 @@ GenericsTests.generic_type_pack_unification3
GenericsTests.infer_generic_function_function_argument
GenericsTests.infer_generic_function_function_argument_overloaded
GenericsTests.infer_generic_lib_function_function_argument
GenericsTests.infer_generic_property
GenericsTests.inferred_local_vars_can_be_polytypes
GenericsTests.infer_generic_methods
GenericsTests.instantiate_cyclic_generic_function
GenericsTests.instantiate_generic_function_in_assignments
GenericsTests.instantiate_generic_function_in_assignments2
@ -276,7 +240,6 @@ GenericsTests.local_vars_can_be_instantiated_polytypes
GenericsTests.mutable_state_polymorphism
GenericsTests.no_stack_overflow_from_quantifying
GenericsTests.properties_can_be_instantiated_polytypes
GenericsTests.properties_can_be_polytypes
GenericsTests.rank_N_types_via_typeof
GenericsTests.reject_clashing_generic_and_pack_names
GenericsTests.self_recursive_instantiated_param
@ -287,30 +250,23 @@ IntersectionTypes.error_detailed_intersection_part
IntersectionTypes.fx_intersection_as_argument
IntersectionTypes.fx_union_as_argument_fails
IntersectionTypes.index_on_an_intersection_type_with_mixed_types
IntersectionTypes.index_on_an_intersection_type_with_one_part_missing_the_property
IntersectionTypes.index_on_an_intersection_type_with_one_property_of_type_any
IntersectionTypes.index_on_an_intersection_type_with_property_guaranteed_to_exist
IntersectionTypes.index_on_an_intersection_type_works_at_arbitrary_depth
IntersectionTypes.no_stack_overflow_from_flattenintersection
IntersectionTypes.overload_is_not_a_function
IntersectionTypes.select_correct_union_fn
IntersectionTypes.should_still_pick_an_overload_whose_arguments_are_unions
IntersectionTypes.table_intersection_setmetatable
IntersectionTypes.table_intersection_write
IntersectionTypes.table_intersection_write_sealed
IntersectionTypes.table_intersection_write_sealed_indirect
IntersectionTypes.table_write_sealed_indirect
isSubtype.functions_and_any
isSubtype.intersection_of_functions_of_different_arities
isSubtype.intersection_of_tables
isSubtype.table_with_any_prop
isSubtype.table_with_table_prop
isSubtype.tables
Linter.DeprecatedApi
Linter.TableOperations
ModuleTests.builtin_types_point_into_globalTypes_arena
ModuleTests.clone_self_property
ModuleTests.deepClone_cyclic_table
ModuleTests.do_not_clone_reexports
ModuleTests.do_not_clone_types_of_reexported_values
NonstrictModeTests.delay_function_does_not_require_its_argument_to_return_anything
NonstrictModeTests.for_in_iterator_variables_are_any
NonstrictModeTests.function_parameters_are_any
@ -333,7 +289,6 @@ Normalize.cyclic_intersection
Normalize.cyclic_table_normalizes_sensibly
Normalize.cyclic_union
Normalize.fuzz_failure_bound_type_is_normal_but_not_its_bounded_to
Normalize.fuzz_failure_instersection_combine_must_follow
Normalize.higher_order_function
Normalize.intersection_combine_on_bound_self
Normalize.intersection_inside_a_table_inside_another_intersection
@ -345,13 +300,11 @@ Normalize.intersection_of_disjoint_tables
Normalize.intersection_of_functions
Normalize.intersection_of_overlapping_tables
Normalize.intersection_of_tables_with_indexers
Normalize.nested_table_normalization_with_non_table__no_ice
Normalize.normalization_does_not_convert_ever
Normalize.normalize_module_return_type
Normalize.normalize_unions_containing_never
Normalize.normalize_unions_containing_unknown
Normalize.return_type_is_not_a_constrained_intersection
Normalize.skip_force_normal_on_external_types
Normalize.union_of_distinct_free_types
Normalize.variadic_tail_is_marked_normal
Normalize.visiting_a_type_twice_is_not_considered_normal
@ -365,7 +318,6 @@ ProvisionalTests.constrained_is_level_dependent
ProvisionalTests.discriminate_from_x_not_equal_to_nil
ProvisionalTests.do_not_ice_when_trying_to_pick_first_of_generic_type_pack
ProvisionalTests.error_on_eq_metamethod_returning_a_type_other_than_boolean
ProvisionalTests.free_is_not_bound_to_any
ProvisionalTests.function_returns_many_things_but_first_of_it_is_forgotten
ProvisionalTests.greedy_inference_with_shared_self_triggers_function_with_no_returns
ProvisionalTests.invariant_table_properties_means_instantiating_tables_in_call_is_unsound
@ -380,7 +332,6 @@ ProvisionalTests.typeguard_inference_incomplete
ProvisionalTests.weird_fail_to_unify_type_pack
ProvisionalTests.weirditer_should_not_loop_forever
ProvisionalTests.while_body_are_also_refined
ProvisionalTests.xpcall_returns_what_f_returns
RefinementTest.and_constraint
RefinementTest.and_or_peephole_refinement
RefinementTest.apply_refinements_on_astexprindexexpr_whose_subscript_expr_is_constant_string
@ -420,7 +371,6 @@ RefinementTest.not_and_constraint
RefinementTest.not_t_or_some_prop_of_t
RefinementTest.or_predicate_with_truthy_predicates
RefinementTest.parenthesized_expressions_are_followed_through
RefinementTest.refine_a_property_not_to_be_nil_through_an_intersection_table
RefinementTest.refine_the_correct_types_opposite_of_when_a_is_not_number_or_string
RefinementTest.refine_unknowns
RefinementTest.string_not_equal_to_string_or_nil
@ -456,7 +406,6 @@ TableTests.augment_nested_table
TableTests.augment_table
TableTests.builtin_table_names
TableTests.call_method
TableTests.call_method_with_explicit_self_argument
TableTests.cannot_augment_sealed_table
TableTests.cannot_call_tables
TableTests.cannot_change_type_of_unsealed_table_prop
@ -469,16 +418,13 @@ TableTests.common_table_element_union_in_call_tail
TableTests.confusing_indexing
TableTests.defining_a_method_for_a_builtin_sealed_table_must_fail
TableTests.defining_a_method_for_a_local_sealed_table_must_fail
TableTests.defining_a_method_for_a_local_unsealed_table_is_ok
TableTests.defining_a_self_method_for_a_builtin_sealed_table_must_fail
TableTests.defining_a_self_method_for_a_local_sealed_table_must_fail
TableTests.defining_a_self_method_for_a_local_unsealed_table_is_ok
TableTests.dont_crash_when_setmetatable_does_not_produce_a_metatabletypevar
TableTests.dont_hang_when_trying_to_look_up_in_cyclic_metatable_index
TableTests.dont_invalidate_the_properties_iterator_of_free_table_when_rolled_back
TableTests.dont_leak_free_table_props
TableTests.dont_quantify_table_that_belongs_to_outer_scope
TableTests.dont_seal_an_unsealed_table_by_passing_it_to_a_function_that_takes_a_sealed_table
TableTests.dont_suggest_exact_match_keys
TableTests.error_detailed_indexer_key
TableTests.error_detailed_indexer_value
@ -508,8 +454,6 @@ TableTests.infer_array_2
TableTests.infer_indexer_from_value_property_in_literal
TableTests.inferred_return_type_of_free_table
TableTests.inferring_crazy_table_should_also_be_quick
TableTests.instantiate_table_cloning
TableTests.instantiate_table_cloning_2
TableTests.instantiate_table_cloning_3
TableTests.instantiate_tables_at_scope_level
TableTests.invariant_table_properties_means_instantiating_tables_in_assignment_is_unsound
@ -518,14 +462,12 @@ TableTests.length_operator_intersection
TableTests.length_operator_non_table_union
TableTests.length_operator_union
TableTests.length_operator_union_errors
TableTests.less_exponential_blowup_please
TableTests.meta_add
TableTests.meta_add_both_ways
TableTests.meta_add_inferred
TableTests.metatable_mismatch_should_fail
TableTests.missing_metatable_for_sealed_tables_do_not_get_inferred
TableTests.mixed_tables_with_implicit_numbered_keys
TableTests.MixedPropertiesAndIndexers
TableTests.nil_assign_doesnt_hit_indexer
TableTests.okay_to_add_property_to_unsealed_tables_by_function_call
TableTests.only_ascribe_synthetic_names_at_module_scope
@ -535,8 +477,8 @@ TableTests.open_table_unification_2
TableTests.pass_a_union_of_tables_to_a_function_that_requires_a_table
TableTests.pass_a_union_of_tables_to_a_function_that_requires_a_table_2
TableTests.pass_incompatible_union_to_a_generic_table_without_crashing
TableTests.passing_compatible_unions_to_a_generic_table_without_crashing
TableTests.persistent_sealed_table_is_immutable
TableTests.prop_access_on_key_whose_types_mismatches
TableTests.property_lookup_through_tabletypevar_metatable
TableTests.quantify_even_that_table_was_never_exported_at_all
TableTests.quantify_metatables_of_metatables_of_table
@ -570,23 +512,17 @@ TableTests.tc_member_function_2
TableTests.top_table_type
TableTests.type_mismatch_on_massive_table_is_cut_short
TableTests.unification_of_unions_in_a_self_referential_type
TableTests.unifying_tables_shouldnt_uaf1
TableTests.unifying_tables_shouldnt_uaf2
TableTests.used_colon_correctly
TableTests.used_colon_instead_of_dot
TableTests.used_dot_instead_of_colon
TableTests.used_dot_instead_of_colon_but_correctly
TableTests.width_subtyping
ToDot.bound_table
ToDot.class
ToDot.function
ToDot.metatable
ToDot.primitive
ToDot.table
ToString.exhaustive_toString_of_cyclic_table
ToString.function_type_with_argument_names_and_self
ToString.function_type_with_argument_names_generic
ToString.named_metatable_toStringNamedFunction
ToString.no_parentheses_around_cyclic_function_type_in_union
ToString.toStringDetailed2
ToString.toStringErrorPack
@ -605,13 +541,12 @@ TryUnifyTests.typepack_unification_should_trim_free_tails
TryUnifyTests.variadics_should_use_reversed_properly
TypeAliases.cli_38393_recursive_intersection_oom
TypeAliases.corecursive_types_generic
TypeAliases.do_not_quantify_unresolved_aliases
TypeAliases.forward_declared_alias_is_not_clobbered_by_prior_unification_with_any
TypeAliases.general_require_multi_assign
TypeAliases.generic_param_remap
TypeAliases.mismatched_generic_pack_type_param
TypeAliases.mismatched_generic_type_param
TypeAliases.mutually_recursive_generic_aliases
TypeAliases.mutually_recursive_types_errors
TypeAliases.mutually_recursive_types_restriction_not_ok_1
TypeAliases.mutually_recursive_types_restriction_not_ok_2
TypeAliases.mutually_recursive_types_swapsies_not_ok
@ -619,29 +554,22 @@ TypeAliases.recursive_types_restriction_not_ok
TypeAliases.stringify_optional_parameterized_alias
TypeAliases.stringify_type_alias_of_recursive_template_table_type
TypeAliases.stringify_type_alias_of_recursive_template_table_type2
TypeAliases.type_alias_import_mutation
TypeAliases.type_alias_fwd_declaration_is_precise
TypeAliases.type_alias_local_mutation
TypeAliases.type_alias_local_rename
TypeAliases.type_alias_of_an_imported_recursive_generic_type
TypeAliases.type_alias_of_an_imported_recursive_type
TypeInfer.check_expr_recursion_limit
TypeInfer.checking_should_not_ice
TypeInfer.cli_50041_committing_txnlog_in_apollo_client_error
TypeInfer.cyclic_follow
TypeInfer.do_not_bind_a_free_table_to_a_union_containing_that_table
TypeInfer.dont_report_type_errors_within_an_AstStatError
TypeInfer.follow_on_new_types_in_substitution
TypeInfer.free_typevars_introduced_within_control_flow_constructs_do_not_get_an_elevated_TypeLevel
TypeInfer.globals
TypeInfer.globals2
TypeInfer.index_expr_should_be_checked
TypeInfer.infer_assignment_value_types
TypeInfer.infer_assignment_value_types_mutable_lval
TypeInfer.infer_through_group_expr
TypeInfer.infer_type_assertion_value_type
TypeInfer.no_heap_use_after_free_error
TypeInfer.no_stack_overflow_from_isoptional
TypeInfer.recursive_metatable_crash
TypeInfer.tc_after_error_recovery_no_replacement_name_in_error
TypeInfer.tc_if_else_expressions1
TypeInfer.tc_if_else_expressions2
@ -650,56 +578,32 @@ TypeInfer.tc_if_else_expressions_expected_type_2
TypeInfer.tc_if_else_expressions_expected_type_3
TypeInfer.tc_if_else_expressions_type_union
TypeInfer.type_infer_recursion_limit_no_ice
TypeInfer.types stored in astResolvedTypes
TypeInfer.warn_on_lowercase_parent_property
TypeInfer.weird_case
TypeInferAnyError.any_type_propagates
TypeInferAnyError.assign_prop_to_table_by_calling_any_yields_any
TypeInferAnyError.call_to_any_yields_any
TypeInferAnyError.calling_error_type_yields_error
TypeInferAnyError.can_get_length_of_any
TypeInferAnyError.can_subscript_any
TypeInferAnyError.CheckMethodsOfAny
TypeInferAnyError.for_in_loop_iterator_is_any
TypeInferAnyError.for_in_loop_iterator_is_any2
TypeInferAnyError.for_in_loop_iterator_is_error
TypeInferAnyError.for_in_loop_iterator_is_error2
TypeInferAnyError.for_in_loop_iterator_returns_any
TypeInferAnyError.for_in_loop_iterator_returns_any2
TypeInferAnyError.indexing_error_type_does_not_produce_an_error
TypeInferAnyError.length_of_error_type_does_not_produce_an_error
TypeInferAnyError.metatable_of_any_can_be_a_table
TypeInferAnyError.prop_access_on_any_with_other_options
TypeInferAnyError.quantify_any_does_not_bind_to_itself
TypeInferAnyError.replace_every_free_type_when_unifying_a_complex_function_with_any
TypeInferAnyError.type_error_addition
TypeInferClasses.assign_to_prop_of_class
TypeInferClasses.call_base_method
TypeInferClasses.call_instance_method
TypeInferClasses.call_method_of_a_child_class
TypeInferClasses.call_method_of_a_class
TypeInferClasses.can_assign_to_prop_of_base_class
TypeInferClasses.can_assign_to_prop_of_base_class_using_string
TypeInferClasses.can_read_prop_of_base_class
TypeInferClasses.can_read_prop_of_base_class_using_string
TypeInferClasses.cannot_call_method_of_child_on_base_instance
TypeInferClasses.cannot_call_unknown_method_of_a_class
TypeInferClasses.cannot_unify_class_instance_with_primitive
TypeInferClasses.class_type_mismatch_with_name_conflict
TypeInferClasses.class_unification_type_mismatch_is_correct_order
TypeInferClasses.classes_can_have_overloaded_operators
TypeInferClasses.classes_without_overloaded_operators_cannot_be_added
TypeInferClasses.detailed_class_unification_error
TypeInferClasses.function_arguments_are_covariant
TypeInferClasses.higher_order_function_arguments_are_contravariant
TypeInferClasses.higher_order_function_return_type_is_not_contravariant
TypeInferClasses.higher_order_function_return_values_are_covariant
TypeInferClasses.optional_class_field_access_error
TypeInferClasses.table_class_unification_reports_sane_errors_for_missing_properties
TypeInferClasses.table_indexers_are_invariant
TypeInferClasses.table_properties_are_invariant
TypeInferClasses.warn_when_prop_almost_matches
TypeInferClasses.we_can_infer_that_a_parameter_must_be_a_particular_class
TypeInferClasses.we_can_report_when_someone_is_trying_to_use_a_table_rather_than_a_class
TypeInferFunctions.another_indirect_function_case_where_it_is_ok_to_provide_too_many_arguments
TypeInferFunctions.another_recursive_local_function
@ -711,21 +615,18 @@ TypeInferFunctions.complicated_return_types_require_an_explicit_annotation
TypeInferFunctions.cyclic_function_type_in_args
TypeInferFunctions.dont_give_other_overloads_message_if_only_one_argument_matching_overload_exists
TypeInferFunctions.dont_infer_parameter_types_for_functions_from_their_call_site
TypeInferFunctions.dont_mutate_the_underlying_head_of_typepack_when_calling_with_self
TypeInferFunctions.duplicate_functions_with_different_signatures_not_allowed_in_nonstrict
TypeInferFunctions.error_detailed_function_mismatch_arg
TypeInferFunctions.error_detailed_function_mismatch_arg_count
TypeInferFunctions.error_detailed_function_mismatch_ret
TypeInferFunctions.error_detailed_function_mismatch_ret_count
TypeInferFunctions.error_detailed_function_mismatch_ret_mult
TypeInferFunctions.first_argument_can_be_optional
TypeInferFunctions.free_is_not_bound_to_unknown
TypeInferFunctions.func_expr_doesnt_leak_free
TypeInferFunctions.function_cast_error_uses_correct_language
TypeInferFunctions.function_decl_non_self_sealed_overwrite
TypeInferFunctions.function_decl_non_self_sealed_overwrite_2
TypeInferFunctions.function_decl_non_self_unsealed_overwrite
TypeInferFunctions.function_decl_quantify_right_type
TypeInferFunctions.function_does_not_return_enough_values
TypeInferFunctions.function_statement_sealed_table_assignment_through_indexer
TypeInferFunctions.higher_order_function_2
@ -737,13 +638,10 @@ TypeInferFunctions.infer_anonymous_function_arguments
TypeInferFunctions.infer_anonymous_function_arguments_outside_call
TypeInferFunctions.infer_return_type_from_selected_overload
TypeInferFunctions.infer_that_function_does_not_return_a_table
TypeInferFunctions.inferred_higher_order_functions_are_quantified_at_the_right_time
TypeInferFunctions.inferred_higher_order_functions_are_quantified_at_the_right_time2
TypeInferFunctions.it_is_ok_not_to_supply_enough_retvals
TypeInferFunctions.it_is_ok_to_oversaturate_a_higher_order_function_argument
TypeInferFunctions.list_all_overloads_if_no_overload_takes_given_argument_count
TypeInferFunctions.list_only_alternative_overloads_that_match_argument_count
TypeInferFunctions.mutual_recursion
TypeInferFunctions.no_lossy_function_type
TypeInferFunctions.occurs_check_failure_in_function_return_type
TypeInferFunctions.quantify_constrained_types
@ -759,10 +657,8 @@ TypeInferFunctions.too_few_arguments_variadic_generic
TypeInferFunctions.too_few_arguments_variadic_generic2
TypeInferFunctions.too_many_arguments
TypeInferFunctions.too_many_return_values
TypeInferFunctions.toposort_doesnt_break_mutual_recursion
TypeInferFunctions.vararg_function_is_quantified
TypeInferFunctions.vararg_functions_should_allow_calls_of_any_types_and_size
TypeInferLoops.correctly_scope_locals_while
TypeInferLoops.for_in_loop
TypeInferLoops.for_in_loop_error_on_factory_not_returning_the_right_amount_of_values
TypeInferLoops.for_in_loop_error_on_iterator_requiring_args_but_none_given
@ -789,14 +685,9 @@ TypeInferLoops.repeat_loop_condition_binds_to_its_block
TypeInferLoops.symbols_in_repeat_block_should_not_be_visible_beyond_until_condition
TypeInferLoops.unreachable_code_after_infinite_loop
TypeInferLoops.varlist_declared_by_for_in_loop_should_be_free
TypeInferLoops.while_loop
TypeInferModules.bound_free_table_export_is_ok
TypeInferModules.constrained_anyification_clone_immutable_types
TypeInferModules.custom_require_global
TypeInferModules.do_not_modify_imported_types
TypeInferModules.do_not_modify_imported_types_2
TypeInferModules.do_not_modify_imported_types_3
TypeInferModules.do_not_modify_imported_types_4
TypeInferModules.general_require_call_expression
TypeInferModules.general_require_type_mismatch
TypeInferModules.module_type_conflict
@ -808,16 +699,14 @@ TypeInferModules.require_module_that_does_not_export
TypeInferModules.require_types
TypeInferModules.type_error_of_unknown_qualified_type
TypeInferModules.warn_if_you_try_to_require_a_non_modulescript
TypeInferOOP.CheckMethodsOfSealed
TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_another_overload_works
TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_it_wont_help_2
TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_not_defined_with_colon
TypeInferOOP.inferred_methods_of_free_tables_have_the_same_level_as_the_enclosing_table
TypeInferOOP.inferring_hundreds_of_self_calls_should_not_suffocate_memory
TypeInferOOP.method_depends_on_table
TypeInferOOP.methods_are_topologically_sorted
TypeInferOOP.nonstrict_self_mismatch_tail
TypeInferOOP.object_constructor_can_refer_to_method_of_self
TypeInferOOP.table_oop
TypeInferOperators.and_adds_boolean
TypeInferOperators.and_adds_boolean_no_superfluous_union
TypeInferOperators.and_binexps_dont_unify
@ -872,37 +761,26 @@ TypeInferPrimitives.string_function_other
TypeInferPrimitives.string_index
TypeInferPrimitives.string_length
TypeInferPrimitives.string_method
TypeInferUnknownNever.array_like_table_of_never_is_inhabitable
TypeInferUnknownNever.assign_to_global_which_is_never
TypeInferUnknownNever.assign_to_local_which_is_never
TypeInferUnknownNever.assign_to_prop_which_is_never
TypeInferUnknownNever.assign_to_subscript_which_is_never
TypeInferUnknownNever.call_never
TypeInferUnknownNever.dont_unify_operands_if_one_of_the_operand_is_never_in_any_ordering_operators
TypeInferUnknownNever.index_on_never
TypeInferUnknownNever.index_on_union_of_tables_for_properties_that_is_never
TypeInferUnknownNever.index_on_union_of_tables_for_properties_that_is_sorta_never
TypeInferUnknownNever.length_of_never
TypeInferUnknownNever.math_operators_and_never
TypeInferUnknownNever.never_is_reflexive
TypeInferUnknownNever.never_subtype_and_string_supertype
TypeInferUnknownNever.pick_never_from_variadic_type_pack
TypeInferUnknownNever.string_subtype_and_never_supertype
TypeInferUnknownNever.string_subtype_and_unknown_supertype
TypeInferUnknownNever.table_with_prop_of_type_never_is_also_reflexive
TypeInferUnknownNever.table_with_prop_of_type_never_is_uninhabitable
TypeInferUnknownNever.type_packs_containing_never_is_itself_uninhabitable
TypeInferUnknownNever.type_packs_containing_never_is_itself_uninhabitable2
TypeInferUnknownNever.unary_minus_of_never
TypeInferUnknownNever.unknown_is_reflexive
TypeInferUnknownNever.unknown_subtype_and_string_supertype
TypePackTests.cyclic_type_packs
TypePackTests.higher_order_function
TypePackTests.multiple_varargs_inference_are_not_confused
TypePackTests.no_return_size_should_be_zero
TypePackTests.pack_tail_unification_check
TypePackTests.parenthesized_varargs_returns_any
TypePackTests.self_and_varargs_should_work
TypePackTests.type_alias_backwards_compatible
TypePackTests.type_alias_default_export
TypePackTests.type_alias_default_mixed_self
@ -913,8 +791,6 @@ TypePackTests.type_alias_default_type_pack_self_tp
TypePackTests.type_alias_default_type_self
TypePackTests.type_alias_defaults_confusing_types
TypePackTests.type_alias_defaults_recursive_type
TypePackTests.type_alias_type_pack_explicit
TypePackTests.type_alias_type_pack_explicit_multi
TypePackTests.type_alias_type_pack_multi
TypePackTests.type_alias_type_pack_variadic
TypePackTests.type_alias_type_packs
@ -949,6 +825,7 @@ TypeSingletons.string_singletons_escape_chars
TypeSingletons.string_singletons_mismatch
TypeSingletons.table_insert_with_a_singleton_argument
TypeSingletons.table_properties_type_error_escapes
TypeSingletons.tagged_unions_immutable_tag
TypeSingletons.tagged_unions_using_singletons
TypeSingletons.taking_the_length_of_string_singleton
TypeSingletons.taking_the_length_of_union_of_string_singleton
@ -978,5 +855,4 @@ UnionTypes.optional_union_members
UnionTypes.optional_union_methods
UnionTypes.return_types_can_be_disjoint
UnionTypes.table_union_write_indirect
UnionTypes.unify_unsealed_table_union_check
UnionTypes.union_equality_comparisons

View File

@ -39,6 +39,16 @@ class Node(svg.Node):
    def details(self, root):
        return "{} ({:,} bytes, {:.1%}); self: {:,} bytes in {:,} objects".format(self.name, self.width, self.width / root.width, self.size, self.count)

def getkey(heap, obj, key):
    pairs = obj.get("pairs", [])
    for i in range(0, len(pairs), 2):
        if pairs[i] and heap[pairs[i]]["type"] == "string" and heap[pairs[i]]["data"] == key:
            if pairs[i + 1] and heap[pairs[i + 1]]["type"] == "string":
                return heap[pairs[i + 1]]["data"]
            else:
                return None
    return None

# load files
if arguments.snapshotnew == None:
    dumpold = None
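For reference, a short standalone sketch of what the new getkey helper evaluates to; the two-address heap fragment below is invented for illustration, and the helper body is repeated verbatim so the snippet runs on its own.

def getkey(heap, obj, key):
    # same logic as the helper above: walk the flat key/value address pairs and
    # return the value only when both the key and the value are string objects
    pairs = obj.get("pairs", [])
    for i in range(0, len(pairs), 2):
        if pairs[i] and heap[pairs[i]]["type"] == "string" and heap[pairs[i]]["data"] == key:
            if pairs[i + 1] and heap[pairs[i + 1]]["type"] == "string":
                return heap[pairs[i + 1]]["data"]
            else:
                return None
    return None

# invented snapshot fragment: a metatable mapping the string "__mode" to "kv"
heap = {"0x10": {"type": "string", "data": "__mode"},
        "0x11": {"type": "string", "data": "kv"}}
mt = {"type": "table", "pairs": ["0x10", "0x11"]}

assert getkey(heap, mt, "__mode") == "kv"   # string key found, string value returned
assert getkey(heap, mt, "__index") is None  # no such key in the table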
@ -50,6 +60,8 @@ else:
    with open(arguments.snapshotnew) as f:
        dump = json.load(f)
heap = dump["objects"]
# reachability analysis: how much of the heap is reachable from roots?
visited = set()
queue = []
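The reachability pass itself is a plain breadth-first walk over the object graph. A minimal sketch of that pattern, assuming a hypothetical edges_of(obj) callback that yields the addresses an object references; the real script additionally threads a tree node through the queue so sizes can be attributed to a path from the roots.

from collections import deque

def reachable_bytes(heap, roots, edges_of):
    # heap: addr -> object dict; roots: iterable of root addresses;
    # edges_of(obj): addresses referenced by obj (table pairs, array slots, protos, ...)
    visited = set()
    queue = deque(roots)
    total = 0
    while queue:
        addr = queue.popleft()
        if addr in visited or addr not in heap:
            continue
        visited.add(addr)
        total += heap[addr].get("size", 0)
        queue.extend(edges_of(heap[addr]))
    return visited, total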
@ -66,7 +78,7 @@ while offset < len(queue):
        continue
    visited.add(addr)
    obj = dump["objects"][addr]
    obj = heap[addr]
    if not dumpold or not addr in dumpold["objects"]:
        node.count += 1
@ -75,17 +87,27 @@ while offset < len(queue):
if obj["type"] == "table":
pairs = obj.get("pairs", [])
weakkey = False
weakval = False
if "metatable" in obj:
modemt = getkey(heap, heap[obj["metatable"]], "__mode")
if modemt:
weakkey = "k" in modemt
weakval = "v" in modemt
for i in range(0, len(pairs), 2):
key = pairs[i+0]
val = pairs[i+1]
if key and val and dump["objects"][key]["type"] == "string":
if key and heap[key]["type"] == "string":
# string keys are always strong
queue.append((key, node))
queue.append((val, node.child(dump["objects"][key]["data"])))
if val and not weakval:
queue.append((val, node.child(heap[key]["data"])))
else:
if key:
if key and not weakkey:
queue.append((key, node))
if val:
if val and not weakval:
queue.append((val, node))
for a in obj.get("array", []):
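The point of the weak-table handling above is that weak references do not keep objects alive in the GC, so the walk should not follow them either. A simplified standalone illustration (invented addresses; it skips the string-key attribution special case above): with __mode = "v" on the metatable, only the key edge is queued.

heap = {
    "0x1": {"type": "string", "data": "__mode"},
    "0x2": {"type": "string", "data": "v"},
    "0x3": {"type": "table", "pairs": ["0x1", "0x2"]},                    # metatable
    "0x4": {"type": "string", "data": "cache"},                           # key
    "0x5": {"type": "table", "pairs": []},                                # value
    "0x6": {"type": "table", "pairs": ["0x4", "0x5"], "metatable": "0x3"},
}

modemt = "v"              # what getkey(heap, heap["0x3"], "__mode") yields here
weakkey = "k" in modemt   # False: key edges are still followed
weakval = "v" in modemt   # True: value edges are skipped

edges = []
pairs = heap["0x6"]["pairs"]
for i in range(0, len(pairs), 2):
    key, val = pairs[i], pairs[i + 1]
    if key and not weakkey:
        edges.append(key)
    if val and not weakval:
        edges.append(val)

assert edges == ["0x4"]   # the weakly held value "0x5" is not treated as reachable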
@ -97,7 +119,7 @@ while offset < len(queue):
source = ""
if "proto" in obj:
proto = dump["objects"][obj["proto"]]
proto = heap[obj["proto"]]
if "source" in proto:
source = proto["source"]

View File

@ -15,12 +15,20 @@ def updatesize(d, k, s):
def sortedsize(p):
    return sorted(p, key = lambda s: s[1][1], reverse = True)

def getkey(heap, obj, key):
    pairs = obj.get("pairs", [])
    for i in range(0, len(pairs), 2):
        if pairs[i] and heap[pairs[i]]["type"] == "string" and heap[pairs[i]]["data"] == key:
            if pairs[i + 1] and heap[pairs[i + 1]]["type"] == "string":
                return heap[pairs[i + 1]]["data"]
            else:
                return None
    return None
with open(sys.argv[1]) as f:
    dump = json.load(f)
heap = dump["objects"]
type_addr = next((addr for addr,obj in heap.items() if obj["type"] == "string" and obj["data"] == "__type"), None)
size_type = {}
size_udata = {}
size_category = {}
@ -33,11 +41,7 @@ for addr, obj in heap.items():
if obj["type"] == "userdata" and "metatable" in obj:
metatable = heap[obj["metatable"]]
pairs = metatable.get("pairs", [])
typemt = "unknown"
for i in range(0, len(pairs), 2):
if type_addr and pairs[i] == type_addr and pairs[i + 1] and heap[pairs[i + 1]]["type"] == "string":
typemt = heap[pairs[i + 1]]["data"]
typemt = getkey(heap, metatable, "__type") or "unknown"
updatesize(size_udata, typemt, obj["size"])
print("objects by type:")