From 6ac8b47c17a87f5d114a972211bc725d699f8c41 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Thu, 28 Oct 2021 13:54:39 +0200 Subject: [PATCH 01/11] Generalize search for function type in expected type In particular, handle situations like: val x: (Int => Int) & (Int => Int) = x => x which end up arising in at least one test case during type inference after the avoidance fixes in this PR. --- compiler/src/dotty/tools/dotc/core/Types.scala | 18 +++++++----------- .../src/dotty/tools/dotc/typer/Typer.scala | 4 ++-- tests/neg/i11694.scala | 7 +++++++ 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index b1007a32f5c4..5edb70c4ec53 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1674,23 +1674,19 @@ object Types { case _ => resultType } - /** Find the function type in union. - * If there are multiple function types, NoType is returned. + /** Determine the expected function type from the prototype. If multiple + * function types are found in a union or intersection, their intersection + * is returned. If no function type is found, Any is returned. 
*/ - def findFunctionTypeInUnion(using Context): Type = this match { - case t: OrType => - val t1 = t.tp1.findFunctionTypeInUnion - if t1 == NoType then t.tp2.findFunctionTypeInUnion else - val t2 = t.tp2.findFunctionTypeInUnion - // Returen NoType if the union contains multiple function types - if t2 == NoType then t1 else NoType + def findFunctionType(using Context): Type = dealias match + case tp: AndOrType => + tp.tp1.findFunctionType & tp.tp2.findFunctionType case t if defn.isNonRefinedFunction(t) => t case t @ SAMType(_) => t case _ => - NoType - } + defn.AnyType /** This type seen as a TypeBounds */ final def bounds(using Context): TypeBounds = this match { diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 6177faf6b6b0..592fe59d5e09 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1169,7 +1169,7 @@ class Typer extends Namer pt1 match { case tp: TypeParamRef => decomposeProtoFunction(ctx.typerState.constraint.entry(tp).bounds.hi, defaultArity, pos) - case _ => pt1.findFunctionTypeInUnion match { + case _ => pt1.findFunctionType match { case pt1 if defn.isNonRefinedFunction(pt1) => // if expected parameter type(s) are wildcards, approximate from below. // if expected result type is a wildcard, approximate from above. @@ -1444,7 +1444,7 @@ class Typer extends Namer if (tree.tpt.isEmpty) meth1.tpe.widen match { case mt: MethodType => - pt.findFunctionTypeInUnion match { + pt.findFunctionType match { case pt @ SAMType(sam) if !defn.isFunctionType(pt) && mt <:< sam => // SAMs of the form C[?] where C is a class cannot be conversion targets. 
diff --git a/tests/neg/i11694.scala b/tests/neg/i11694.scala index 5bbad1a83ce2..67138fd5a7eb 100644 --- a/tests/neg/i11694.scala +++ b/tests/neg/i11694.scala @@ -4,12 +4,19 @@ def test1 = { def f21: (Int => Int) | Null = x => x + 1 def f22: Null | (Int => Int) = x => x + 1 + + def f31: (Int => Int) | (Int => Int) = x => x + 1 + def f32: (Int => Int) | (Int => Int) | Unit = x => x + 1 + + def f41: (Int => Int) & (Int => Int) = x => x + 1 + def f42: (Int => Int) & (Int => Int) & Any = x => x + 1 } def test2 = { def f1: (Int => String) | (Int => Int) | Null = x => x + 1 // error def f2: (Int => String) | Function[String, Int] | Null = x => "" + x // error def f3: Function[Int, Int] | Function[String, Int] | Null = x => x + 1 // error + def f4: (Int => Int) & (Int => Int) & Unit = x => x + 1 // error } def test3 = { From 410896043a2ed07b9ad39e57bea382eebc6ee1b7 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Thu, 28 Oct 2021 16:57:02 +0200 Subject: [PATCH 02/11] ApproximatingTypeMap: fix skolem handling By definition, a skolem is neither a subtype nor a supertype of a different skolem, so regardless of the variance, we shouldn't return a new skolem when approximating an existing skolem. Fixing derivedSkolemType to not do this lets us remove a special-case in `avoid`. --- compiler/src/dotty/tools/dotc/core/TypeOps.scala | 2 -- compiler/src/dotty/tools/dotc/core/Types.scala | 13 +++++++------ 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 6a5145ffd202..da5056bf1a00 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -459,8 +459,6 @@ object TypeOps: // Therefore, either they don't appear in the type to be avoided, or // it must be a class that encloses the block whose type is to be avoided. 
tp - case tp: SkolemType if partsToAvoid(Nil, tp.info).nonEmpty => - range(defn.NothingType, apply(tp.info)) case tp: TypeVar if mapCtx.typerState.constraint.contains(tp) => val lo = TypeComparer.instanceType( tp.origin, fromBelow = variance > 0 || variance == 0 && tp.hasLowerBound)(using mapCtx) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 5edb70c4ec53..3d9ea51544a2 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5941,12 +5941,13 @@ object Types { case Range(lo, hi) => range(bound.bounds.lo, bound.bounds.hi) case _ => tp.derivedMatchType(bound, scrutinee, cases) - override protected def derivedSkolemType(tp: SkolemType, info: Type): Type = info match { - case Range(lo, hi) => - range(tp.derivedSkolemType(lo), tp.derivedSkolemType(hi)) - case _ => - tp.derivedSkolemType(info) - } + override protected def derivedSkolemType(tp: SkolemType, info: Type): Type = + if info eq tp.info then tp + // By definition, a skolem is neither a subtype nor a supertype of a + // different skolem. So, regardless of `variance`, we cannot return a + // fresh skolem when approximating an existing skolem, we can only return + // a range. + else range(defn.NothingType, info) override protected def derivedClassInfo(tp: ClassInfo, pre: Type): Type = { assert(!isRange(pre)) From 8aa6889e4bc5a1b4c437406af935f477db07cad8 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sat, 6 Nov 2021 16:43:43 +0100 Subject: [PATCH 03/11] avoidCaptures: handle local types, not just terms Note that the added test case still infers `Local` for the type of `a` because avoidance doesn't handle type variables correctly and because the nesting level checks are too coarse, this doesn't lead to an error because the check for forward references in TreePickler is currently disabled. All these issues are fixed in later commits of this PR. 
--- compiler/src/dotty/tools/dotc/core/Types.scala | 4 ++-- tests/pos/i8900a.scala | 11 +++++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i8900a.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 3d9ea51544a2..e990006540b2 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4715,10 +4715,10 @@ object Types { private def avoidCaptures(tp: Type)(using Context): Type = val problemSyms = new TypeAccumulator[Set[Symbol]]: def apply(syms: Set[Symbol], t: Type): Set[Symbol] = t match - case ref @ TermRef(NoPrefix, _) + case ref: NamedType // AVOIDANCE TODO: Are there other problematic kinds of references? // Our current tests only give us these, but we might need to generalize this. - if ref.symbol.maybeOwner.nestingLevel > nestingLevel => + if (ref.prefix eq NoPrefix) && ref.symbol.maybeOwner.nestingLevel > nestingLevel => syms + ref.symbol case _ => foldOver(syms, t) diff --git a/tests/pos/i8900a.scala b/tests/pos/i8900a.scala new file mode 100644 index 000000000000..d34b3dc34516 --- /dev/null +++ b/tests/pos/i8900a.scala @@ -0,0 +1,11 @@ +class Inv[T](val elem: T) +object Test { + def unwrap[Outer](inv: Inv[Outer]): Outer = inv.elem + def wrap[Inner](i: Inner): Inv[Inner] = new Inv(i) + + val a = unwrap({ + class Local + val local = new Local + wrap(local) + }) +} From eb69870b7170a3db78e1a045c9892a3ba0c27b6a Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sat, 6 Nov 2021 16:43:43 +0100 Subject: [PATCH 04/11] Fix nesting level to account for block scopes Previously, the nesting level of a symbol was always equal to the nesting level of its owner incremented by one, but in a situation like i8900a.scala where we have: ... val a = unwrap[?Outer]({ class Local ... 
the owner of both `?Outer` and `Local` is `a`, and so they ended up with the same level even though `Local` should not leak outside of the block that defines it (i8900a.scala compiled regardless due to the disabled check for forward references in TreePickler re-enabled later in this PR). We rectify this by associating each scope with a level which is always greater than the level of the enclosing scope (repurposing the existing Scope#nestingLevel method which wasn't used for anything), newly created symbols then simply take the level of the current scope (this required tweaking typedCase so that the pattern symbols were typed in the scope where they end up being entered). Also add a `-Yprint-level` option for debugging level-related issues. --- compiler/src/dotty/tools/dotc/Run.scala | 2 +- compiler/src/dotty/tools/dotc/ast/tpd.scala | 3 ++- .../tools/dotc/config/ScalaSettings.scala | 1 + .../src/dotty/tools/dotc/core/Contexts.scala | 4 ++++ .../dotty/tools/dotc/core/Definitions.scala | 7 +++++-- .../src/dotty/tools/dotc/core/Scopes.scala | 17 +++++++-------- .../tools/dotc/core/SymDenotations.scala | 14 ------------- .../dotty/tools/dotc/core/SymbolLoaders.scala | 2 +- .../src/dotty/tools/dotc/core/Symbols.scala | 20 +++++++++--------- .../src/dotty/tools/dotc/core/Types.scala | 6 ++++-- .../dotc/core/classfile/ClassfileParser.scala | 4 ++-- .../core/unpickleScala2/Scala2Unpickler.scala | 2 +- .../tools/dotc/printing/PlainPrinter.scala | 16 +++++++++++--- .../tools/dotc/printing/RefinedPrinter.scala | 10 ++++++++- .../tools/dotc/quoted/MacroExpansion.scala | 2 +- .../tools/dotc/transform/ExpandSAMs.scala | 3 ++- .../dotty/tools/dotc/typer/Implicits.scala | 3 ++- .../src/dotty/tools/dotc/typer/Inliner.scala | 15 +++++++------ .../src/dotty/tools/dotc/typer/Namer.scala | 13 ++++-------- .../src/dotty/tools/dotc/typer/ReTyper.scala | 2 +- .../src/dotty/tools/dotc/typer/Typer.scala | 21 +++++++++++++++---- 21 files changed, 97 insertions(+), 70 deletions(-) diff --git 
a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index b9552d97fca7..120cc0a799f7 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -70,7 +70,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint protected def rootContext(using Context): Context = { ctx.initialize() ctx.base.setPhasePlan(comp.phases) - val rootScope = new MutableScope + val rootScope = new MutableScope(0) val bootstrap = ctx.fresh .setPeriod(Period(comp.nextRunId, FirstPhaseId)) .setScope(rootScope) diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 2db0bd6de2d4..39e24208e3e0 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -7,6 +7,7 @@ import typer.ProtoTypes import transform.SymUtils._ import transform.TypeUtils._ import core._ +import Scopes.newScope import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._, NameOps._ import Symbols._, StdNames._, Annotations._, Trees._, Symbols._ import Decorators._, DenotTransformers._ @@ -344,7 +345,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } else parents val cls = newNormalizedClassSymbol(owner, tpnme.ANON_CLASS, Synthetic | Final, parents1, - coord = fns.map(_.span).reduceLeft(_ union _)) + newScope, coord = fns.map(_.span).reduceLeft(_ union _)) val constr = newConstructor(cls, Synthetic, Nil, Nil).entered def forwarder(fn: TermSymbol, name: TermName) = { val fwdMeth = fn.copy(cls, name, Synthetic | Method | Final).entered.asTerm diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 13adb0165c77..f6a240655bb8 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -274,6 +274,7 @@ private sealed trait YSettings: val YprintSyms: 
Setting[Boolean] = BooleanSetting("-Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") val YprintDebug: Setting[Boolean] = BooleanSetting("-Yprint-debug", "When printing trees, print some extra information useful for debugging.") val YprintDebugOwners: Setting[Boolean] = BooleanSetting("-Yprint-debug-owners", "When printing trees, print owners of definitions.") + val YprintLevel: Setting[Boolean] = BooleanSetting("-Yprint-level", "print nesting levels of symbols and type variables.") val YshowPrintErrors: Setting[Boolean] = BooleanSetting("-Yshow-print-errors", "Don't suppress exceptions thrown during tree printing.") val YtestPickler: Setting[Boolean] = BooleanSetting("-Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") val YcheckReentrant: Setting[Boolean] = BooleanSetting("-Ycheck-reentrant", "Check that compiled program does not contain vars that can be accessed from a global root.") diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index f10a1cc7372c..3e3f8a800ebf 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -270,6 +270,10 @@ object Contexts { if owner != null && owner.isClass then owner.asClass.unforcedDecls else scope + def nestingLevel: Int = + val sc = effectiveScope + if sc != null then sc.nestingLevel else 0 + /** Sourcefile corresponding to given abstract file, memoized */ def getSource(file: AbstractFile, codec: => Codec = Codec(settings.encoding.value)) = { util.Stats.record("Context.getSource") diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 19ddcf7b16fe..464c7900a54f 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -49,7 +49,10 @@ class Definitions { private 
def newPermanentClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, infoFn: ClassSymbol => Type) = newClassSymbol(owner, name, flags | Permanent | NoInits | Open, infoFn) - private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope = newScope) = + private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef]): ClassSymbol = + enterCompleteClassSymbol(owner, name, flags, parents, newScope(owner.nestingLevel + 1)) + + private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope) = newCompleteClassSymbol(owner, name, flags | Permanent | NoInits | Open, parents, decls).entered private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = @@ -433,7 +436,7 @@ class Definitions { Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_typeTest, Object_eq, Object_ne) @tu lazy val AnyKindClass: ClassSymbol = { - val cls = newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil) + val cls = newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil, newScope(0)) if (!ctx.settings.YnoKindPolymorphism.value) // Enable kind-polymorphism by exposing scala.AnyKind cls.entered diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala index 1553093a2b87..ecc61530601d 100644 --- a/compiler/src/dotty/tools/dotc/core/Scopes.scala +++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala @@ -75,7 +75,7 @@ object Scopes { */ def size: Int - /** The number of outer scopes from which symbols are inherited */ + /** The number of scopes enclosing this scope. 
*/ def nestingLevel: Int /** The symbols in this scope in the order they were entered; @@ -193,7 +193,7 @@ object Scopes { * This is necessary because when run from reflection every scope needs to have a * SynchronizedScope as mixin. */ - class MutableScope protected[Scopes](initElems: ScopeEntry, initSize: Int, val nestingLevel: Int = 0) + class MutableScope protected[Scopes](initElems: ScopeEntry, initSize: Int, val nestingLevel: Int) extends Scope { /** Scope shares elements with `base` */ @@ -201,7 +201,7 @@ object Scopes { this(base.lastEntry, base.size, base.nestingLevel + 1) ensureCapacity(MinHashedScopeSize) - def this() = this(null, 0, 0) + def this(nestingLevel: Int) = this(null, 0, nestingLevel) private[dotc] var lastEntry: ScopeEntry = initElems @@ -225,7 +225,7 @@ object Scopes { /** Use specified synthesize for this scope */ def useSynthesizer(s: SymbolSynthesizer): Unit = synthesize = s - protected def newScopeLikeThis(): MutableScope = new MutableScope() + protected def newScopeLikeThis(): MutableScope = new MutableScope(nestingLevel) /** Clone scope, taking care not to force the denotations of any symbols in the scope. 
*/ @@ -440,7 +440,10 @@ object Scopes { } /** Create a new scope */ - def newScope: MutableScope = new MutableScope() + def newScope(using Context): MutableScope = + new MutableScope(ctx.nestingLevel + 1) + + def newScope(nestingLevel: Int): MutableScope = new MutableScope(nestingLevel) /** Create a new scope nested in another one with which it shares its elements */ def newNestedScope(outer: Scope)(using Context): MutableScope = new MutableScope(outer) @@ -468,8 +471,4 @@ object Scopes { override def lookupEntry(name: Name)(using Context): ScopeEntry = null override def lookupNextEntry(entry: ScopeEntry)(using Context): ScopeEntry = null } - - /** A class for error scopes (mutable) - */ - class ErrorScope(owner: Symbol) extends MutableScope } diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 7f47f1696cd6..9d55f43dff25 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1477,14 +1477,6 @@ object SymDenotations { else if is(Contravariant) then Contravariant else EmptyFlags - /** The length of the owner chain of this symbol. 1 for _root_, 0 for NoSymbol */ - def nestingLevel(using Context): Int = - @tailrec def recur(d: SymDenotation, n: Int): Int = d match - case NoDenotation => n - case d: ClassDenotation => d.nestingLevel + n // profit from the cache in ClassDenotation - case _ => recur(d.owner, n + 1) - recur(this, 0) - /** The flags to be used for a type parameter owned by this symbol. * Overridden by ClassDenotation. */ @@ -2323,12 +2315,6 @@ object SymDenotations { override def registeredCompanion_=(c: Symbol) = myCompanion = c - - private var myNestingLevel = -1 - - override def nestingLevel(using Context) = - if myNestingLevel == -1 then myNestingLevel = owner.nestingLevel + 1 - myNestingLevel } /** The denotation of a package class. 
diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 26e6e659779e..83198ab65857 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -221,7 +221,7 @@ object SymbolLoaders { /** The scope of a package. This is different from a normal scope * in that names of scope entries are kept in mangled form. */ - final class PackageScope extends MutableScope { + final class PackageScope extends MutableScope(0) { override def newScopeEntry(name: Name, sym: Symbol)(using Context): ScopeEntry = super.newScopeEntry(name.mangled, sym) diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index e49399ff9791..7f0969d55f07 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -46,7 +46,7 @@ object Symbols { * @param coord The coordinates of the symbol (a position or an index) * @param id A unique identifier of the symbol (unique per ContextBase) */ - class Symbol private[Symbols] (private var myCoord: Coord, val id: Int) + class Symbol private[Symbols] (private var myCoord: Coord, val id: Int, val nestingLevel: Int) extends Designator, ParamInfo, SrcPos, printing.Showable { type ThisName <: Name @@ -368,8 +368,8 @@ object Symbols { type TermSymbol = Symbol { type ThisName = TermName } type TypeSymbol = Symbol { type ThisName = TypeName } - class ClassSymbol private[Symbols] (coord: Coord, val assocFile: AbstractFile, id: Int) - extends Symbol(coord, id) { + class ClassSymbol private[Symbols] (coord: Coord, val assocFile: AbstractFile, id: Int, nestingLevel: Int) + extends Symbol(coord, id, nestingLevel) { type ThisName = TypeName @@ -459,7 +459,7 @@ object Symbols { override protected def prefixString: String = "ClassSymbol" } - @sharable object NoSymbol extends Symbol(NoCoord, 0) { + @sharable object 
NoSymbol extends Symbol(NoCoord, 0, 0) { override def associatedFile(using Context): AbstractFile = NoSource.file override def recomputeDenot(lastd: SymDenotation)(using Context): SymDenotation = NoDenotation } @@ -516,7 +516,7 @@ object Symbols { info: Type, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord)(using Context): Symbol { type ThisName = N } = { - val sym = new Symbol(coord, ctx.base.nextSymId).asInstanceOf[Symbol { type ThisName = N }] + val sym = new Symbol(coord, ctx.base.nextSymId, ctx.nestingLevel).asInstanceOf[Symbol { type ThisName = N }] val denot = SymDenotation(sym, owner, name, flags, info, privateWithin) sym.denot = denot sym @@ -534,7 +534,7 @@ object Symbols { coord: Coord = NoCoord, assocFile: AbstractFile = null)(using Context): ClassSymbol = { - val cls = new ClassSymbol(coord, assocFile, ctx.base.nextSymId) + val cls = new ClassSymbol(coord, assocFile, ctx.base.nextSymId, ctx.nestingLevel) val denot = SymDenotation(cls, owner, name, flags, infoFn(cls), privateWithin) cls.denot = denot cls @@ -546,7 +546,7 @@ object Symbols { name: TypeName, flags: FlagSet, parents: List[TypeRef], - decls: Scope = newScope, + decls: Scope, selfInfo: Type = NoType, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, @@ -564,7 +564,7 @@ object Symbols { name: TypeName, flags: FlagSet, parentTypes: List[Type], - decls: Scope = newScope, + decls: Scope, selfInfo: Type = NoType, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, @@ -580,7 +580,7 @@ object Symbols { } def newRefinedClassSymbol(coord: Coord = NoCoord)(using Context): ClassSymbol = - newCompleteClassSymbol(ctx.owner, tpnme.REFINE_CLASS, NonMember, parents = Nil, coord = coord) + newCompleteClassSymbol(ctx.owner, tpnme.REFINE_CLASS, NonMember, parents = Nil, newScope, coord = coord) /** Create a module symbol with associated module class * from its non-info fields and a function producing the info @@ -646,7 +646,7 @@ object Symbols { name: TermName, modFlags: FlagSet = 
EmptyFlags, clsFlags: FlagSet = EmptyFlags, - decls: Scope = newScope)(using Context): TermSymbol = + decls: Scope = newScope(0))(using Context): TermSymbol = newCompleteModuleSymbol( owner, name, modFlags | PackageCreationFlags, clsFlags | PackageCreationFlags, diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index e990006540b2..59b8405f0e4c 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4667,7 +4667,7 @@ object Types { * @param origin The parameter that's tracked by the type variable. * @param creatorState The typer state in which the variable was created. */ - final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState, nestingLevel: Int) extends CachedProxyType with ValueType { + final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState, val nestingLevel: Int) extends CachedProxyType with ValueType { private var currentOrigin = initOrigin @@ -4713,6 +4713,8 @@ object Types { * are nested more deeply than the type variable itself. 
*/ private def avoidCaptures(tp: Type)(using Context): Type = + if ctx.isAfterTyper then + return tp val problemSyms = new TypeAccumulator[Set[Symbol]]: def apply(syms: Set[Symbol], t: Type): Set[Symbol] = t match case ref: NamedType @@ -4806,7 +4808,7 @@ object Types { } object TypeVar: def apply(initOrigin: TypeParamRef, creatorState: TyperState)(using Context) = - new TypeVar(initOrigin, creatorState, ctx.owner.nestingLevel) + new TypeVar(initOrigin, creatorState, ctx.nestingLevel) type TypeVars = SimpleIdentitySet[TypeVar] diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 410bc6e591d4..c0140c5b8641 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -62,8 +62,8 @@ class ClassfileParser( protected val staticModule: Symbol = moduleRoot.sourceModule(using ictx) - protected val instanceScope: MutableScope = newScope // the scope of all instance definitions - protected val staticScope: MutableScope = newScope // the scope of all static definitions + protected val instanceScope: MutableScope = newScope(0) // the scope of all instance definitions + protected val staticScope: MutableScope = newScope(0) // the scope of all static definitions protected var pool: ConstantPool = _ // the classfile's constant pool protected var currentClassName: SimpleName = _ // JVM name of the current class diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 18f3e0b19d87..305a8a7510df 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -233,7 +233,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas } /** The `decls` scope 
associated with given symbol */ - protected def symScope(sym: Symbol): Scope = symScopes.getOrElseUpdate(sym, newScope) + protected def symScope(sym: Symbol): Scope = symScopes.getOrElseUpdate(sym, newScope(0)) /** Does entry represent an (internal) symbol */ protected def isSymbolEntry(i: Int)(using Context): Boolean = { diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 3a3fca5e7f90..bc6258398f54 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -29,6 +29,7 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def maxToTextRecursions: Int = 100 protected def showUniqueIds = ctx.settings.uniqid.value || Printer.debugPrintUnique + protected def showNestingLevel = ctx.settings.YprintLevel.value protected final def limiter: MessageLimiter = ctx.property(MessageLimiter).get @@ -155,7 +156,12 @@ class PlainPrinter(_ctx: Context) extends Printer { case tp: TermParamRef => ParamRefNameString(tp) ~ lambdaHash(tp.binder) ~ ".type" case tp: TypeParamRef => - ParamRefNameString(tp) ~ lambdaHash(tp.binder) + val suffix = + if showNestingLevel then + val tvar = ctx.typerState.constraint.typeVarOfParam(tp) + if tvar.exists then s"#${tvar.asInstanceOf[TypeVar].nestingLevel.toString}" else "" + else "" + ParamRefNameString(tp) ~ lambdaHash(tp.binder) ~ suffix case tp: SingletonType => toTextSingleton(tp) case AppliedType(tycon, args) => @@ -271,9 +277,13 @@ class PlainPrinter(_ctx: Context) extends Printer { catch { case ex: NullPointerException => "" } else "" - /** If -uniqid is set, the unique id of symbol, after a # */ + /** A string to append to a symbol composed of: + * - if -uniqid is set, its unique id after a #. + * - if -Yprint-level, its nesting level after a %. 
+ */ protected def idString(sym: Symbol): String = - if (showUniqueIds || Printer.debugPrintUnique) "#" + sym.id else "" + (if (showUniqueIds || Printer.debugPrintUnique) "#" + sym.id else "") + + (if (showNestingLevel) "%" + sym.nestingLevel else "") def nameString(sym: Symbol): String = simpleNameString(sym) + idString(sym) // + "<" + (if (sym.exists) sym.owner else "") + ">" diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index ef11ec9434ae..cf5942a178f0 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -798,7 +798,15 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { protected def optAscription[T >: Untyped](tpt: Tree[T]): Text = optText(tpt)(": " ~ _) private def idText(tree: untpd.Tree): Text = - if showUniqueIds && tree.hasType && tree.symbol.exists then s"#${tree.symbol.id}" else "" + (if showUniqueIds && tree.hasType && tree.symbol.exists then s"#${tree.symbol.id}" else "") ~ + (if showNestingLevel then tree.typeOpt match + case tp: NamedType if !tp.symbol.isStatic => s"%${tp.symbol.nestingLevel}" + case tp: TypeVar => s"%${tp.nestingLevel}" + case tp: TypeParamRef => ctx.typerState.constraint.typeVarOfParam(tp) match + case tvar: TypeVar => s"%${tvar.nestingLevel}" + case _ => "" + case _ => "" + else "") private def useSymbol(tree: untpd.Tree) = tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value diff --git a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala index 686998a94800..628afe4504bd 100644 --- a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala +++ b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala @@ -14,6 +14,6 @@ object MacroExpansion { ctx.property(MacroExpansionPosition) def context(inlinedFrom: tpd.Tree)(using Context): Context = - 
QuotesCache.init(ctx.fresh).setProperty(MacroExpansionPosition, SourcePosition(inlinedFrom.source, inlinedFrom.span)).setTypeAssigner(new Typer).withSource(inlinedFrom.source) + QuotesCache.init(ctx.fresh).setProperty(MacroExpansionPosition, SourcePosition(inlinedFrom.source, inlinedFrom.span)).setTypeAssigner(new Typer(ctx.nestingLevel + 1)).withSource(inlinedFrom.source) } diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index b87686996ebd..ea9e7ede1b9a 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -3,6 +3,7 @@ package dotc package transform import core._ +import Scopes.newScope import Contexts._, Symbols._, Types._, Flags._, Decorators._, StdNames._, Constants._ import MegaPhase._ import SymUtils._ @@ -123,7 +124,7 @@ class ExpandSAMs extends MiniPhase: val parents = List( defn.AbstractPartialFunctionClass.typeRef.appliedTo(anonTpe.firstParamTypes.head, anonTpe.resultType), defn.SerializableType) - val pfSym = newNormalizedClassSymbol(anonSym.owner, tpnme.ANON_CLASS, Synthetic | Final, parents, coord = tree.span) + val pfSym = newNormalizedClassSymbol(anonSym.owner, tpnme.ANON_CLASS, Synthetic | Final, parents, newScope, coord = tree.span) def overrideSym(sym: Symbol) = sym.copy( owner = pfSym, diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index d7bf75f8bf73..ff313edfdefa 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -25,6 +25,7 @@ import Constants._ import ProtoTypes._ import ErrorReporting._ import Inferencing.{fullyDefinedType, isFullyDefined} +import Scopes.newScope import Trees._ import transform.SymUtils._ import transform.TypeUtils._ @@ -1774,7 +1775,7 @@ final class SearchRoot extends SearchHistory: // } val parents = 
List(defn.ObjectType, defn.SerializableType) - val classSym = newNormalizedClassSymbol(ctx.owner, LazyImplicitName.fresh().toTypeName, Synthetic | Final, parents, coord = span) + val classSym = newNormalizedClassSymbol(ctx.owner, LazyImplicitName.fresh().toTypeName, Synthetic | Final, parents, newScope, coord = span) val vsyms = pruned.map(_._1.symbol) val nsyms = vsyms.map(vsym => newSymbol(classSym, vsym.name, EmptyFlags, vsym.info, coord = span).entered) val vsymMap = (vsyms zip nsyms).toMap diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index 4fede9b6b9e4..6ab7e156d1f5 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -20,6 +20,7 @@ import ProtoTypes.shallowSelectionProto import Annotations.Annotation import SymDenotations.SymDenotation import Inferencing.isFullyDefined +import Scopes.newScope import config.Printers.inlining import config.Feature import ErrorReporting.errorTree @@ -34,6 +35,8 @@ import util.Spans.Span import dotty.tools.dotc.transform.{Splicer, TreeMapWithStages} import quoted.QuoteUtils +import scala.annotation.constructorOnly + object Inliner { import tpd._ @@ -196,7 +199,7 @@ object Inliner { val UnApply(fun, implicits, patterns) = unapp val sym = unapp.symbol - val cls = newNormalizedClassSymbol(ctx.owner, tpnme.ANON_CLASS, Synthetic | Final, List(defn.ObjectType), coord = sym.coord) + val cls = newNormalizedClassSymbol(ctx.owner, tpnme.ANON_CLASS, Synthetic | Final, List(defn.ObjectType), newScope, coord = sym.coord) val constr = newConstructor(cls, Synthetic, Nil, Nil, coord = sym.coord).entered val targs = fun match @@ -334,7 +337,7 @@ object Inliner { ConstFold(underlyingCodeArg).tpe.widenTermRefExpr match { case ConstantType(Constant(code: String)) => val source2 = SourceFile.virtual("tasty-reflect", code) - inContext(ctx.fresh.setNewTyperState().setTyper(new Typer).setSource(source2)) { + 
inContext(ctx.fresh.setNewTyperState().setTyper(new Typer(ctx.nestingLevel + 1)).setSource(source2)) { val tree2 = new Parser(source2).block() if ctx.reporter.allErrors.nonEmpty then ctx.reporter.allErrors.map((ErrorKind.Parser, _)) @@ -844,7 +847,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) { } else if (inlinedMethod == defn.Compiletime_summonInline) { def searchImplicit(tpt: Tree) = - val evTyper = new Typer + val evTyper = new Typer(ctx.nestingLevel + 1) val evCtx = ctx.fresh.setTyper(evTyper) val evidence = evTyper.inferImplicitArg(tpt.tpe, tpt.span)(using evCtx) evidence.tpe match @@ -1313,7 +1316,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) { } def searchImplicit(sym: TermSymbol, tpt: Tree) = { - val evTyper = new Typer + val evTyper = new Typer(ctx.nestingLevel + 1) val evCtx = ctx.fresh.setTyper(evTyper) val evidence = evTyper.inferImplicitArg(tpt.tpe, tpt.span)(using evCtx) evidence.tpe match { @@ -1511,7 +1514,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) { * 4. Make sure inlined code is type-correct. * 5. 
Make sure that the tree's typing is idempotent (so that future -Ycheck passes succeed) */ - class InlineTyper(initialErrorCount: Int) extends ReTyper { + class InlineTyper(initialErrorCount: Int, @constructorOnly nestingLevel: Int = ctx.nestingLevel + 1) extends ReTyper(nestingLevel) { import reducer._ override def ensureAccessible(tpe: Type, superAccess: Boolean, pos: SrcPos)(using Context): Type = { @@ -1663,7 +1666,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) { } } - override def newLikeThis: Typer = new InlineTyper(initialErrorCount) + override def newLikeThis(nestingLevel: Int): Typer = new InlineTyper(initialErrorCount, nestingLevel) /** True if this inline typer has already issued errors */ override def hasInliningErrors(using Context) = ctx.reporter.errorCount > initialErrorCount diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 9398b50db5e5..aa213d03b4e9 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -27,6 +27,8 @@ import reporting._ import config.Feature.sourceVersion import config.SourceVersion._ +import scala.annotation.constructorOnly + /** This class creates symbols from definitions and imports and gives them * lazy types. * @@ -86,13 +88,6 @@ class Namer { typer: Typer => */ lazy val nestedTyper: mutable.AnyRefMap[Symbol, Typer] = new mutable.AnyRefMap - /** The scope of the typer. - * For nested typers this is a place parameters are entered during completion - * and where they survive until typechecking. A context with this typer also - * has this scope. 
- */ - val scope: MutableScope = newScope - /** We are entering symbols coming from a SourceLoader */ private var lateCompile = false @@ -751,7 +746,7 @@ class Namer { typer: Typer => if (sym.is(Module)) moduleValSig(sym) else valOrDefDefSig(original, sym, Nil, identity)(using localContext(sym).setNewScope) case original: DefDef => - val typer1 = ctx.typer.newLikeThis + val typer1 = ctx.typer.newLikeThis(ctx.nestingLevel + 1) nestedTyper(sym) = typer1 typer1.defDefSig(original, sym, this)(using localContext(sym).setTyper(typer1)) case imp: Import => @@ -1018,7 +1013,7 @@ class Namer { typer: Typer => } class ClassCompleter(cls: ClassSymbol, original: TypeDef)(ictx: Context) extends Completer(original)(ictx) { - withDecls(newScope) + withDecls(newScope(using ictx)) protected implicit val completerCtx: Context = localContext(cls) diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index 4f604a5a0b93..85be8c32227a 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -22,7 +22,7 @@ import Nullables._ * * Otherwise, everything is as in Typer. 
*/ -class ReTyper extends Typer with ReChecking { +class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking { import tpd._ private def assertTyped(tree: untpd.Tree)(using Context): Unit = diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 592fe59d5e09..41582950dbcd 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -51,6 +51,8 @@ import Nullables._ import NullOpsDecorator._ import config.Config +import scala.annotation.constructorOnly + object Typer { /** The precedence of bindings which determines which of several bindings will be @@ -110,7 +112,11 @@ object Typer { tree.putAttachment(HiddenSearchFailure, fail :: tree.attachmentOrElse(HiddenSearchFailure, Nil)) } -class Typer extends Namer +/** Typecheck trees, the main entry point is `typed`. + * + * @param nestingLevel The nesting level of the `scope` of this Typer. + */ +class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer with TypeAssigner with Applications with Implicits @@ -125,6 +131,13 @@ class Typer extends Namer import tpd.{cpy => _, _} import untpd.cpy + /** The scope of the typer. + * For nested typers (cf `Namer#nestedTyper`), this is a place parameters are + * entered during completion and where they survive until typechecking. A + * context with this typer also has this scope. + */ + val scope: MutableScope = newScope(nestingLevel) + /** A temporary data item valid for a single typed ident: * The set of all root import symbols that have been * encountered as a qualifier of an import so far. @@ -141,7 +154,7 @@ class Typer extends Namer private var foundUnderScala2: Type = NoType // Overridden in derived typers - def newLikeThis: Typer = new Typer + def newLikeThis(nestingLevel: Int): Typer = new Typer(nestingLevel) /** Find the type of an identifier with given `name` in given context `ctx`. 
* @param name the name of the identifier @@ -1652,7 +1665,7 @@ class Typer extends Namer /** Type a case. */ def typedCase(tree: untpd.CaseDef, sel: Tree, wideSelType: Type, pt: Type)(using Context): CaseDef = { val originalCtx = ctx - val gadtCtx: Context = ctx.fresh.setFreshGADTBounds + val gadtCtx: Context = ctx.fresh.setFreshGADTBounds.setNewScope def caseRest(pat: Tree)(using Context) = { val pt1 = instantiateMatchTypeProto(pat, pt) match { @@ -1677,7 +1690,7 @@ class Typer extends Namer val pat1 = typedPattern(tree.pat, wideSelType)(using gadtCtx) caseRest(pat1)( using Nullables.caseContext(sel, pat1)( - using gadtCtx.fresh.setNewScope)) + using gadtCtx)) } def typedLabeled(tree: untpd.Labeled)(using Context): Labeled = { From 454e30139dae571428f07c86d502a0ffe47e07e0 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sat, 20 Nov 2021 17:08:00 +0100 Subject: [PATCH 05/11] Replace useNecessaryEither by necessaryConstraintsOnly The name was misleading since it was also used in approximateWildcards, the new getter also returns true in GADT mode instead of having `either` manually check both the mode and the variable (this does not affect the behavior of approximateWildcards since GADT constraints never involve wildcards). 
--- .../dotty/tools/dotc/core/TypeComparer.scala | 44 +++++++++++-------- 1 file changed, 26 insertions(+), 18 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index daa7394dae52..47cfda3463aa 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -63,7 +63,23 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling private var myInstance: TypeComparer = this def currentInstance: TypeComparer = myInstance - private var useNecessaryEither = false + private var myNecessaryConstraintsOnly = false + /** When collecting the constraints needed for a particular subtyping + * judgment to be true, we sometimes need to approximate the constraint + * set (see `TypeComparer#either` for example). + * + * Normally, this means adding extra constraints which may not be necessary + * for the subtyping judgment to be true, but if this variable is set to true + * we will instead under-approximate and keep only the constraints that must + * always be present for the subtyping judgment to hold. + * + * This is needed for GADT bounds inference to be sound, but it is also used + * when constraining a method call based on its expected type to avoid adding + * constraints that would later prevent us from typechecking method + * arguments, see or-inf.scala and and-inf.scala for examples. + */ + protected def necessaryConstraintsOnly(using Context) = + ctx.mode.is(Mode.GadtConstraintInference) || myNecessaryConstraintsOnly /** Is a subtype check in progress? 
In that case we may not * permanently instantiate type variables, because the corresponding @@ -134,20 +150,20 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } def necessarySubType(tp1: Type, tp2: Type): Boolean = - val saved = useNecessaryEither - useNecessaryEither = true + val saved = myNecessaryConstraintsOnly + myNecessaryConstraintsOnly = true try topLevelSubType(tp1, tp2) - finally useNecessaryEither = saved + finally myNecessaryConstraintsOnly = saved /** Use avoidance to get rid of wildcards in constraint bounds if * we are doing a necessary comparison, or the mode is TypeVarsMissContext. * The idea is that under either of these conditions we are not interested * in creating a fresh type variable to replace the wildcard. I verified * that several tests break if one or the other part of the disjunction is dropped. - * (for instance, i12677.scala demands `useNecessaryEither` in the condition) + * (for instance, i12677.scala demands `necessaryConstraintsOnly` in the condition) */ override protected def approximateWildcards: Boolean = - useNecessaryEither || ctx.mode.is(Mode.TypevarsMissContext) + necessaryConstraintsOnly || ctx.mode.is(Mode.TypevarsMissContext) def testSubType(tp1: Type, tp2: Type): CompareResult = GADTused = false @@ -1580,24 +1596,16 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** Returns true iff the result of evaluating either `op1` or `op2` is true and approximates resulting constraints. * - * If we're inferring GADT bounds or constraining a method based on its - * expected type, we infer only the _necessary_ constraints, this means we - * keep the smaller constraint if any, or no constraint at all. This is - * necessary for GADT bounds inference to be sound. When constraining a - * method, this avoid committing of constraints that would later prevent us - * from typechecking method arguments, see or-inf.scala and and-inf.scala for - * examples. 
+   * If `necessaryConstraintsOnly` is true, we keep the smaller constraint if
+   * any, or no constraint at all.
    *
    * Otherwise, we infer _sufficient_ constraints: we try to keep the smaller of
    * the two constraints, but if never is smaller than the other, we just pick
    * the first one.
-   *
-   * @see [[necessaryEither]] for the GADT / result type case
-   * @see [[sufficientEither]] for the normal case
    */
  protected def either(op1: => Boolean, op2: => Boolean): Boolean =
    Stats.record("TypeComparer.either")
-    if ctx.mode.is(Mode.GadtConstraintInference) || useNecessaryEither then
+    if necessaryConstraintsOnly then
      necessaryEither(op1, op2)
    else
      sufficientEither(op1, op2)
@@ -1673,7 +1681,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling
   *    T1 & T2 <:< T3
   *    T1 <:< T2 | T3
   *
-  * Unlike [[sufficientEither]], this method is used in GADTConstraintInference mode, when we are attempting
+  * But this method is used when `necessaryConstraintsOnly` is true, like when we are attempting
   * to infer GADT constraints that necessarily follow from the subtyping relationship. For instance, if we have
   *
   *     enum Expr[T] {
From 362aaf282392d5a315a6d818e378562f03e01a75 Mon Sep 17 00:00:00 2001
From: Guillaume Martres
Date: Sun, 7 Nov 2021 12:17:03 +0100
Subject: [PATCH 06/11] Fix variance of approximating map in `addOneBound`

It turns out that the variance was flipped: when adding a constraint
`P <: List[WildcardType(Int, Any)]`, we should over-constrain to
`P <: List[Int]`, but previously we would under-constrain to
`P <: List[Any]` which would allow us later to infer `P := List[String]`
for example. However, this logic needs to be flipped when inferring
necessary constraints (this explains why the previous behavior was
seemingly correct).

No test case but this will end up being important in later commits of
this PR where we re-use the same map to do more approximations.
--- .../tools/dotc/core/ConstraintHandling.scala | 24 ++++++++++++++++++- .../dotty/tools/dotc/core/TypeComparer.scala | 18 -------------- 2 files changed, 23 insertions(+), 19 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 87f5b98a1744..889341184587 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -56,6 +56,24 @@ trait ConstraintHandling { */ protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty + protected var myNecessaryConstraintsOnly = false + /** When collecting the constraints needed for a particular subtyping + * judgment to be true, we sometimes need to approximate the constraint + * set (see `TypeComparer#either` for example). + * + * Normally, this means adding extra constraints which may not be necessary + * for the subtyping judgment to be true, but if this variable is set to true + * we will instead under-approximate and keep only the constraints that must + * always be present for the subtyping judgment to hold. + * + * This is needed for GADT bounds inference to be sound, but it is also used + * when constraining a method call based on its expected type to avoid adding + * constraints that would later prevent us from typechecking method + * arguments, see or-inf.scala and and-inf.scala for examples. 
+ */ + protected def necessaryConstraintsOnly(using Context): Boolean = + ctx.mode.is(Mode.GadtConstraintInference) || myNecessaryConstraintsOnly + def checkReset() = assert(addConstraintInvocations == 0) assert(frozenConstraint == false) @@ -92,7 +110,11 @@ trait ConstraintHandling { false else val dropWildcards = new AvoidWildcardsMap: - if !isUpper then variance = -1 + // Approximate the upper-bound from below and vice-versa + if isUpper then variance = -1 + // ...unless we can only infer necessary constraints, in which case we + // flip the variance to under-approximate. + if necessaryConstraintsOnly then variance = -variance override def mapWild(t: WildcardType) = if approximateWildcards then super.mapWild(t) else newTypeVar(apply(t.effectiveBounds).toBounds) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 47cfda3463aa..7e91b245787c 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -63,24 +63,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling private var myInstance: TypeComparer = this def currentInstance: TypeComparer = myInstance - private var myNecessaryConstraintsOnly = false - /** When collecting the constraints needed for a particular subtyping - * judgment to be true, we sometimes need to approximate the constraint - * set (see `TypeComparer#either` for example). - * - * Normally, this means adding extra constraints which may not be necessary - * for the subtyping judgment to be true, but if this variable is set to true - * we will instead under-approximate and keep only the constraints that must - * always be present for the subtyping judgment to hold. 
- * - * This is needed for GADT bounds inference to be sound, but it is also used - * when constraining a method call based on its expected type to avoid adding - * constraints that would later prevent us from typechecking method - * arguments, see or-inf.scala and and-inf.scala for examples. - */ - protected def necessaryConstraintsOnly(using Context) = - ctx.mode.is(Mode.GadtConstraintInference) || myNecessaryConstraintsOnly - /** Is a subtype check in progress? In that case we may not * permanently instantiate type variables, because the corresponding * constraint might still be retracted and the instantiation should From 07588ab394e54d4d794f4c1364bb51195aeddb1f Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sun, 7 Nov 2021 11:45:56 +0100 Subject: [PATCH 07/11] Drop unnecessary ConstraintHandling#approximateWildcards After improving Inferencing#variances to take the type variables appearing into the expected type into account, the special-casing based on necessaryConstraintsOnly provided by approximateWildcards turned out to be unnecessary. This change required tweaking the -Ytest-pickler logic to avoid a regression in tests/pos/i8802a.scala where a widened skolem in a prefix lead to a pretty-printing difference. 
--- .../tools/dotc/core/ConstraintHandling.scala | 7 +-- .../dotty/tools/dotc/core/TypeComparer.scala | 10 ----- .../tools/dotc/printing/PlainPrinter.scala | 2 + .../dotty/tools/dotc/typer/Inferencing.scala | 43 +++++++++++-------- 4 files changed, 28 insertions(+), 34 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 889341184587..107d7008776f 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -97,11 +97,6 @@ trait ConstraintHandling { def fullBounds(param: TypeParamRef)(using Context): TypeBounds = nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param)) - /** If true, eliminate wildcards in bounds by avoidance, otherwise replace - * them by fresh variables. - */ - protected def approximateWildcards: Boolean = true - protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = if !constraint.contains(param) then true else if !isUpper && param.occursIn(rawBound) then @@ -116,7 +111,7 @@ trait ConstraintHandling { // flip the variance to under-approximate. 
if necessaryConstraintsOnly then variance = -variance override def mapWild(t: WildcardType) = - if approximateWildcards then super.mapWild(t) + if ctx.mode.is(Mode.TypevarsMissContext) then super.mapWild(t) else newTypeVar(apply(t.effectiveBounds).toBounds) val bound = dropWildcards(rawBound) val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 7e91b245787c..1d75c9ef0019 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -137,16 +137,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling try topLevelSubType(tp1, tp2) finally myNecessaryConstraintsOnly = saved - /** Use avoidance to get rid of wildcards in constraint bounds if - * we are doing a necessary comparison, or the mode is TypeVarsMissContext. - * The idea is that under either of these conditions we are not interested - * in creating a fresh type variable to replace the wildcard. I verified - * that several tests break if one or the other part of the disjunction is dropped. 
- * (for instance, i12677.scala demands `necessaryConstraintsOnly` in the condition) - */ - override protected def approximateWildcards: Boolean = - necessaryConstraintsOnly || ctx.mode.is(Mode.TypevarsMissContext) - def testSubType(tp1: Type, tp2: Type): CompareResult = GADTused = false if !topLevelSubType(tp1, tp2) then CompareResult.Fail diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index bc6258398f54..d2efbeff2901 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -66,6 +66,8 @@ class PlainPrinter(_ctx: Context) extends Printer { case tp @ AppliedType(tycon, args) => if (defn.isCompiletimeAppliedType(tycon.typeSymbol)) tp.tryCompiletimeConstantFold else tycon.dealias.appliedTo(args) + case tp: NamedType => + tp.reduceProjection case _ => tp } diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index c544a0423d51..ba9116e4f6be 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -437,12 +437,19 @@ object Inferencing { type VarianceMap = SimpleIdentityMap[TypeVar, Integer] - /** All occurrences of type vars in this type that satisfy predicate - * `include` mapped to their variances (-1/0/1) in this type, where + /** All occurrences of type vars in `tp` that satisfy predicate + * `include` mapped to their variances (-1/0/1) in both `tp` and + * `pt.finalResultType`, where * -1 means: only contravariant occurrences * +1 means: only covariant occurrences * 0 means: mixed or non-variant occurrences * + * We need to take the occurences in `pt` into account because a type + * variable created when typing the current tree might only appear in the + * bounds of a type variable in the expected type, for example when + * `ConstraintHandling#addOneBound` 
creates type variables when approximating + * a bound. + * * Note: We intentionally use a relaxed version of variance here, * where the variance does not change under a prefix of a named type * (the strict version makes prefixes invariant). This turns out to be @@ -453,7 +460,7 @@ object Inferencing { * * we want to instantiate U to x.type right away. No need to wait further. */ - private def variances(tp: Type)(using Context): VarianceMap = { + private def variances(tp: Type, pt: Type = WildcardType)(using Context): VarianceMap = { Stats.record("variances") val constraint = ctx.typerState.constraint @@ -486,21 +493,21 @@ object Inferencing { def traverse(tp: Type) = { vmap1 = accu(vmap1, tp) } vmap.foreachBinding { (tvar, v) => val param = tvar.origin - val e = constraint.entry(param) - accu.setVariance(v) - if (v >= 0) { - traverse(e.bounds.lo) - constraint.lower(param).foreach(p => traverse(constraint.typeVarOfParam(p))) - } - if (v <= 0) { - traverse(e.bounds.hi) - constraint.upper(param).foreach(p => traverse(constraint.typeVarOfParam(p))) - } + constraint.entry(param) match + case TypeBounds(lo, hi) => + accu.setVariance(v) + if v >= 0 then + traverse(lo) + constraint.lower(param).foreach(p => traverse(constraint.typeVarOfParam(p))) + if v <= 0 then + traverse(hi) + constraint.upper(param).foreach(p => traverse(constraint.typeVarOfParam(p))) + case _ => } if (vmap1 eq vmap) vmap else propagate(vmap1) } - propagate(accu(SimpleIdentityMap.empty, tp)) + propagate(accu(accu(SimpleIdentityMap.empty, tp), pt.finalResultType)) } /** Run the transformation after dealiasing but return the original type if it was a no-op. */ @@ -546,8 +553,8 @@ trait Inferencing { this: Typer => * @param locked the set of type variables of the current typer state that cannot be interpolated * at the present time * Eligible for interpolation are all type variables owned by the current typerstate - * that are not in locked. 
Type variables occurring co- (respectively, contra-) variantly in the type - * are minimized (respectvely, maximized). Non occurring type variables are minimized if they + * that are not in locked. Type variables occurring co- (respectively, contra-) variantly in the tree type + * or expected type are minimized (respectvely, maximized). Non occurring type variables are minimized if they * have a lower bound different from Nothing, maximized otherwise. Type variables appearing * non-variantly in the type are left untouched. * @@ -572,7 +579,7 @@ trait Inferencing { this: Typer => if ((ownedVars ne locked) && !ownedVars.isEmpty) { val qualifying = ownedVars -- locked if (!qualifying.isEmpty) { - typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}") + typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, pt = $pt, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}") val resultAlreadyConstrained = tree.isInstanceOf[Apply] || tree.tpe.isInstanceOf[MethodOrPoly] if (!resultAlreadyConstrained) @@ -580,7 +587,7 @@ trait Inferencing { this: Typer => // This is needed because it could establish singleton type upper bounds. See i2998.scala. val tp = tree.tpe.widen - val vs = variances(tp) + val vs = variances(tp, pt) // Avoid interpolating variables occurring in tree's type if typerstate has unreported errors. // Reason: The errors might reflect unsatisfiable constraints. In that From 8ed6bde18cf76c423cd98979643b6d5ef0ab1a81 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sun, 7 Nov 2021 14:55:42 +0100 Subject: [PATCH 08/11] Better level-checking in constraints to handle bad bounds Previously, we enforced level-correctness only when instantiating type variables, but this is not good enough as demonstrated by tests/neg/i8900.scala. 
The problem is that if we allow level-incorrect bounds, then we might end up reasoning with bad bounds outside of the scope where they are defined. This can lead to level-correct but unsound instantiations. To prevent this, we now enforce level-correctness in constraints at all times, we also introduce `AvoidMap` to share more logic between level-avoidance and symbol-avoidance (see also the added TODO on `TypeOps#avoid`). Note that this implementation is still incomplete: we only check the nestingLevel of NamedTypes, but we also need to check for TypeVars, this will be handled in the next commit. --- .../tools/dotc/core/ConstraintHandling.scala | 58 +++++-- .../tools/dotc/core/GadtConstraint.scala | 5 + .../src/dotty/tools/dotc/core/TypeOps.scala | 150 ++++++++++-------- .../src/dotty/tools/dotc/core/Types.scala | 35 +--- .../dotty/tools/dotc/typer/Inferencing.scala | 2 +- tests/neg/i8900.scala | 28 ++++ tests/{neg => run}/i8861.scala | 8 +- 7 files changed, 172 insertions(+), 114 deletions(-) create mode 100644 tests/neg/i8900.scala rename tests/{neg => run}/i8861.scala (77%) diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 107d7008776f..7df57fc40c10 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -81,6 +81,10 @@ trait ConstraintHandling { assert(homogenizeArgs == false) assert(comparedTypeLambdas == Set.empty) + def nestingLevel(param: TypeParamRef) = constraint.typeVarOfParam(param) match + case tv: TypeVar => tv.nestingLevel + case _ => Int.MaxValue + def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = constraint.nonParamBounds(param) def fullLowerBound(param: TypeParamRef)(using Context): Type = @@ -97,6 +101,49 @@ trait ConstraintHandling { def fullBounds(param: TypeParamRef)(using Context): TypeBounds = 
    nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param))
 
+  /** An approximating map that prevents types nested deeper than maxLevel as
+   * well as WildcardTypes from leaking into the constraint.
+   * Note that level-checking is turned off after typer and in uncommittable
+   * TyperState since these leaks should be safe.
+   */
+  class LevelAvoidMap(topLevelVariance: Int, maxLevel: Int)(using Context) extends TypeOps.AvoidMap:
+    variance = topLevelVariance
+
+    /** Are we allowed to refer to types of the given `level`? */
+    private def levelOK(level: Int): Boolean =
+      level <= maxLevel || ctx.isAfterTyper || !ctx.typerState.isCommittable
+
+    def toAvoid(tp: NamedType): Boolean =
+      tp.prefix == NoPrefix && !tp.symbol.isStatic && !levelOK(tp.symbol.nestingLevel)
+
+    override def mapWild(t: WildcardType) =
+      if ctx.mode.is(Mode.TypevarsMissContext) then super.mapWild(t)
+      else
+        val tvar = newTypeVar(apply(t.effectiveBounds).toBounds)
+        tvar
+  end LevelAvoidMap
+
+  /** Approximate `rawBound` if needed to make it a legal bound of `param` by
+   * avoiding wildcards and types with a level strictly greater than its
+   * `nestingLevel`.
+   *
+   * Note that level-checking must be performed here and cannot be delayed
+   * until instantiation because if we allow level-incorrect bounds, then we
+   * might end up reasoning with bad bounds outside of the scope where they are
+   * defined. This can lead to level-correct but unsound instantiations as
+   * demonstrated by tests/neg/i8900.scala.
+   */
+  protected def legalBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Type =
+    // Over-approximate for soundness.
+    var variance = if isUpper then -1 else 1
+    // ...unless we can only infer necessary constraints, in which case we
+    // flip the variance to under-approximate.
+ if necessaryConstraintsOnly then variance = -variance + + val approx = LevelAvoidMap(variance, nestingLevel(param)) + approx(rawBound) + end legalBound + protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = if !constraint.contains(param) then true else if !isUpper && param.occursIn(rawBound) then @@ -104,16 +151,7 @@ trait ConstraintHandling { // so we shouldn't allow them as constraints either. false else - val dropWildcards = new AvoidWildcardsMap: - // Approximate the upper-bound from below and vice-versa - if isUpper then variance = -1 - // ...unless we can only infer necessary constraints, in which case we - // flip the variance to under-approximate. - if necessaryConstraintsOnly then variance = -variance - override def mapWild(t: WildcardType) = - if ctx.mode.is(Mode.TypevarsMissContext) then super.mapWild(t) - else newTypeVar(apply(t.effectiveBounds).toBounds) - val bound = dropWildcards(rawBound) + val bound = legalBound(param, rawBound, isUpper) val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) val equalBounds = (if isUpper then lo else hi) eq bound if equalBounds && !bound.existsPart(_ eq param, StopAt.Static) then diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index 7d84b9892057..3e68e9546553 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -79,6 +79,11 @@ final class ProperGadtConstraint private( subsumes(extractConstraint(left), extractConstraint(right), extractConstraint(pre)) } + override protected def legalBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Type = + // GADT constraints never involve wildcards and are not propagated outside + // the case where they're valid, so no approximating is needed. 
+ rawBound + override def addToConstraint(params: List[Symbol])(using Context): Boolean = { import NameKinds.DepParamName diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index da5056bf1a00..1a14cd0d722c 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -410,91 +410,109 @@ object TypeOps: } } - /** An upper approximation of the given type `tp` that does not refer to any symbol in `symsToAvoid`. + /** An approximating map that drops NamedTypes matching `toAvoid` and wildcard types. */ + abstract class AvoidMap(using Context) extends AvoidWildcardsMap: + @threadUnsafe lazy val localParamRefs = util.HashSet[Type]() + + def toAvoid(tp: NamedType): Boolean + + /** True iff all NamedTypes on this prefix are static */ + override def isStaticPrefix(pre: Type)(using Context): Boolean = pre match + case pre: NamedType => + val sym = pre.currentSymbol + sym.is(Package) || sym.isStatic && isStaticPrefix(pre.prefix) + case _ => true + + override def apply(tp: Type): Type = tp match + case tp: TermRef + if toAvoid(tp) => + tp.info.widenExpr.dealias match { + case info: SingletonType => apply(info) + case info => range(defn.NothingType, apply(info)) + } + case tp: TypeRef if toAvoid(tp) => + tp.info match { + case info: AliasingBounds => + apply(info.alias) + case TypeBounds(lo, hi) => + range(atVariance(-variance)(apply(lo)), apply(hi)) + case info: ClassInfo => + range(defn.NothingType, apply(classBound(info))) + case _ => + emptyRange // should happen only in error cases + } + case tp: ThisType => + // ThisType is only used inside a class. + // Therefore, either they don't appear in the type to be avoided, or + // it must be a class that encloses the block whose type is to be avoided. 
+        tp
+      case tp: LazyRef =>
+        if localParamRefs.contains(tp.ref) then tp
+        else if isExpandingBounds then emptyRange
+        else mapOver(tp)
+      case tl: HKTypeLambda =>
+        localParamRefs ++= tl.paramRefs
+        mapOver(tl)
+      case _ =>
+        super.apply(tp)
+    end apply
+
+    /** Three deviations from standard derivedSelect:
+     *   1. We first try a widening conversion to the type's info with
+     *      the original prefix. Since the original prefix is known to
+     *      be a subtype of the returned prefix, this can improve results.
+     *   2. Then, if the approximation result is a singleton reference C#x.type, we
+     *      replace by the widened type, which is usually more natural.
+     *   3. Finally, we need to handle the case where the prefix type does not have a member
+     *      named `tp.name` anymore. In that case, we need to fall back to Bot..Top.
+     */
+    override def derivedSelect(tp: NamedType, pre: Type) =
+      if (pre eq tp.prefix)
+        tp
+      else tryWiden(tp, tp.prefix).orElse {
+        if (tp.isTerm && variance > 0 && !pre.isSingleton)
+          apply(tp.info.widenExpr)
+        else if (upper(pre).member(tp.name).exists)
+          super.derivedSelect(tp, pre)
+        else
+          range(defn.NothingType, defn.AnyType)
+      }
+  end AvoidMap
+
+  /** An upper approximation of the given type `tp` that does not refer to any symbol in `symsToAvoid`
+   *  and does not contain any WildcardType.
    *  We need to approximate with ranges:
    *
    *  term references to symbols in `symsToAvoid`,
    *  term references that have a widened type of which some part refers
    *  to a symbol in `symsToAvoid`,
    *  type references to symbols in `symsToAvoid`,
-   *  this types of classes in `symsToAvoid`.
    *
    *  Type variables that would be interpolated to a type that
    *  needs to be widened are replaced by the widened interpolation instance.
+   *
+   *  TODO: Could we replace some or all usages of this method by
+   *        `LevelAvoidMap` instead?
It would be good to investigate this in details + * but when I tried it, avoidance for inlined trees broke because `TreeMap` + * does not update `ctx.nestingLevel` when entering a block so I'm leaving + * this as Future Work™. */ def avoid(tp: Type, symsToAvoid: => List[Symbol])(using Context): Type = { - val widenMap = new ApproximatingTypeMap { + val widenMap = new AvoidMap { @threadUnsafe lazy val forbidden = symsToAvoid.toSet - @threadUnsafe lazy val localParamRefs = util.HashSet[Type]() - def toAvoid(sym: Symbol) = !sym.isStatic && forbidden.contains(sym) - def partsToAvoid = new NamedPartsAccumulator(tp => toAvoid(tp.symbol)) - - /** True iff all NamedTypes on this prefix are static */ - override def isStaticPrefix(pre: Type)(using Context): Boolean = pre match - case pre: NamedType => - val sym = pre.currentSymbol - sym.is(Package) || sym.isStatic && isStaticPrefix(pre.prefix) - case _ => true - - def apply(tp: Type): Type = tp match - case tp: TermRef - if toAvoid(tp.symbol) || partsToAvoid(Nil, tp.info).nonEmpty => - tp.info.widenExpr.dealias match { - case info: SingletonType => apply(info) - case info => range(defn.NothingType, apply(info)) - } - case tp: TypeRef if toAvoid(tp.symbol) => - tp.info match { - case info: AliasingBounds => - apply(info.alias) - case TypeBounds(lo, hi) => - range(atVariance(-variance)(apply(lo)), apply(hi)) - case info: ClassInfo => - range(defn.NothingType, apply(classBound(info))) - case _ => - emptyRange // should happen only in error cases - } - case tp: ThisType => - // ThisType is only used inside a class. - // Therefore, either they don't appear in the type to be avoided, or - // it must be a class that encloses the block whose type is to be avoided. 
- tp + def toAvoid(tp: NamedType) = + val sym = tp.symbol + !sym.isStatic && forbidden.contains(sym) + + override def apply(tp: Type): Type = tp match case tp: TypeVar if mapCtx.typerState.constraint.contains(tp) => val lo = TypeComparer.instanceType( tp.origin, fromBelow = variance > 0 || variance == 0 && tp.hasLowerBound)(using mapCtx) val lo1 = apply(lo) if (lo1 ne lo) lo1 else tp - case tp: LazyRef => - if localParamRefs.contains(tp.ref) then tp - else if isExpandingBounds then emptyRange - else mapOver(tp) - case tl: HKTypeLambda => - localParamRefs ++= tl.paramRefs - mapOver(tl) case _ => - mapOver(tp) + super.apply(tp) end apply - - /** Three deviations from standard derivedSelect: - * 1. We first try a widening conversion to the type's info with - * the original prefix. Since the original prefix is known to - * be a subtype of the returned prefix, this can improve results. - * 2. Then, if the approximation result is a singleton reference C#x.type, we - * replace by the widened type, which is usually more natural. - * 3. Finally, we need to handle the case where the prefix type does not have a member - * named `tp.name` anymmore. In that case, we need to fall back to Bot..Top. - */ - override def derivedSelect(tp: NamedType, pre: Type) = - if (pre eq tp.prefix) - tp - else tryWiden(tp, tp.prefix).orElse { - if (tp.isTerm && variance > 0 && !pre.isSingleton) - apply(tp.info.widenExpr) - else if (upper(pre).member(tp.name).exists) - super.derivedSelect(tp, pre) - else - range(defn.NothingType, defn.AnyType) - } } widenMap(tp) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 59b8405f0e4c..68e2dc92303d 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4668,7 +4668,6 @@ object Types { * @param creatorState The typer state in which the variable was created. 
*/ final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState, val nestingLevel: Int) extends CachedProxyType with ValueType { - private var currentOrigin = initOrigin def origin: TypeParamRef = currentOrigin @@ -4709,38 +4708,6 @@ object Types { /** Is the variable already instantiated? */ def isInstantiated(using Context): Boolean = instanceOpt.exists - /** Avoid term references in `tp` to parameters or local variables that - * are nested more deeply than the type variable itself. - */ - private def avoidCaptures(tp: Type)(using Context): Type = - if ctx.isAfterTyper then - return tp - val problemSyms = new TypeAccumulator[Set[Symbol]]: - def apply(syms: Set[Symbol], t: Type): Set[Symbol] = t match - case ref: NamedType - // AVOIDANCE TODO: Are there other problematic kinds of references? - // Our current tests only give us these, but we might need to generalize this. - if (ref.prefix eq NoPrefix) && ref.symbol.maybeOwner.nestingLevel > nestingLevel => - syms + ref.symbol - case _ => - foldOver(syms, t) - val problems = problemSyms(Set.empty, tp) - if problems.isEmpty then tp - else - val atp = TypeOps.avoid(tp, problems.toList) - def msg = i"Inaccessible variables captured in instantation of type variable $this.\n$tp was fixed to $atp" - typr.println(msg) - val bound = TypeComparer.fullUpperBound(origin) - if !(atp <:< bound) then - throw new TypeError(i"$msg,\nbut the latter type does not conform to the upper bound $bound") - atp - // AVOIDANCE TODO: This really works well only if variables are instantiated from below - // If we hit a problematic symbol while instantiating from above, then avoidance - // will widen the instance type further. This could yield an alias, which would be OK. - // But it also could yield a true super type which would then fail the bounds check - // and throw a TypeError. The right thing to do instead would be to avoid "downwards". - // To do this, we need first test cases for that situation. 
- /** Instantiate variable with given type */ def instantiateWith(tp: Type)(using Context): Type = { assert(tp ne this, i"self instantiation of $origin, constraint = ${ctx.typerState.constraint}") @@ -4765,7 +4732,7 @@ object Types { * is also a singleton type. */ def instantiate(fromBelow: Boolean)(using Context): Type = - val tp = avoidCaptures(TypeComparer.instanceType(origin, fromBelow)) + val tp = TypeComparer.instanceType(origin, fromBelow) if myInst.exists then // The line above might have triggered instantiation of the current type variable myInst else diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index ba9116e4f6be..426543879103 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -447,7 +447,7 @@ object Inferencing { * We need to take the occurences in `pt` into account because a type * variable created when typing the current tree might only appear in the * bounds of a type variable in the expected type, for example when - * `ConstraintHandling#addOneBound` creates type variables when approximating + * `ConstraintHandling#legalBound` creates type variables when approximating * a bound. * * Note: We intentionally use a relaxed version of variance here, diff --git a/tests/neg/i8900.scala b/tests/neg/i8900.scala new file mode 100644 index 000000000000..c82f9cd18aa5 --- /dev/null +++ b/tests/neg/i8900.scala @@ -0,0 +1,28 @@ +trait Base { + type M +} +trait A { + type M >: Int | String +} +trait B { + type M <: Int & String +} +object Test { + def foo[T](z: T, x: A & B => T): T = z + def foo2[T](z: T, x: T): T = z + + def main(args: Array[String]): Unit = { + val x = foo(1, x => (??? : x.M)) + val x1: String = x // error (was: ClassCastException) + + val a = foo2(1, + if false then + val x: A & B = ??? + ??? 
: x.M + else 1 + ) + + val b: String = a // error (was: ClassCastException) + } +} + diff --git a/tests/neg/i8861.scala b/tests/run/i8861.scala similarity index 77% rename from tests/neg/i8861.scala rename to tests/run/i8861.scala index 744b49b0107b..e1e802a5c72b 100644 --- a/tests/neg/i8861.scala +++ b/tests/run/i8861.scala @@ -18,14 +18,16 @@ object Test { int = vi => vi.i : vi.A, str = vs => vs.t : vs.A ) + // Used to infer `c.visit[Int & M)]` and error out in the second lambda, + // now infers `c.visit[(Int & M | String & M)]` def minimalFail[M](c: Container { type A = M }): M = c.visit( int = vi => vi.i : vi.A, - str = vs => vs.t : vs.A // error // error + str = vs => vs.t : vs.A ) def main(args: Array[String]): Unit = { val e: Container { type A = String } = new StrV println(minimalOk(e)) // this one prints "hello" - println(minimalFail(e)) // this one fails with ClassCastException: class java.lang.String cannot be cast to class java.lang.Integer + println(minimalFail(e)) // used to fail with ClassCastException, now prints "hello" } -} \ No newline at end of file +} From 3ab18a90ac24dba440c498aab6a8f0c763589605 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Wed, 17 Nov 2021 17:10:15 +0100 Subject: [PATCH 09/11] Create fresh type variables to keep constraints level-correct This completes the implementation of `LevelAvoidMap` from the previous commit: we now make sure that the nonParamBounds of a type variable does not refer to variables of a higher level by creating fresh variables of the appropriate level if necessary. Each fresh variable will be upper- or lower-bounded by the existing variable it is substituted for depending on variance (an idea that I got from [1]), in the invariant case the existing variable will be instantiated to the fresh one (unlike [2], we can't simply mutate the nestingLevel of the existing variable after running avoidance on its bounds because the constraint containing these bounds might later be retracted). 
Additionally: - When unifying two type variables, keep the one with the lowest level in the constraint set and make sure the bounds transferred from the other one are level-correct. This required some changes in `Constraint#addLess` which previously assumed that `unify` would always keep the second parameter. - When instantiating a type variable to its full lower- or upper-bound, we also need to avoid any type variable of a higher level among its param bound. This commit is necessary to avoid leaking local types in i8900a2.scala and i8900a3.scala, these kind of leaks will become compile-time error in the next commit. This commit required making a type parameter explicit both in SnippetChecker.scala and i13809/Macros_1.scala, in both situations the problem is that the lambda passed to `map` can only be typed if the type argument of `map` contains a wildcard, but LevelAvoidMap instead creates a fresh type variable of a lower level at a point where we don't know yet that this cannot work. Since this situation seems very rare in practice, I believe this is an acceptable trade-off for soundness. [1]: Lionel Parreaux. "The simple essence of algebraic subtyping: principal type inference with subtyping made easy (functional pearl)." https://dl.acm.org/doi/abs/10.1145/3409006 [2]: Oleg Kiselyov. 
"How OCaml type checker works -- or what polymorphism and garbage collection have in common" https://okmij.org/ftp/ML/generalization.html --- .../dotty/tools/dotc/core/Constraint.scala | 26 ++- .../tools/dotc/core/ConstraintHandling.scala | 149 ++++++++++++++++-- .../tools/dotc/core/GadtConstraint.scala | 1 + .../src/dotty/tools/dotc/core/NameKinds.scala | 7 + .../src/dotty/tools/dotc/core/NameTags.scala | 5 + .../tools/dotc/core/OrderingConstraint.scala | 34 ++-- .../tools/dotc/core/TypeApplications.scala | 10 ++ .../src/dotty/tools/dotc/core/Types.scala | 13 +- .../dotty/tools/dotc/typer/Inferencing.scala | 69 +++++--- .../dotty/tools/dotc/typer/ProtoTypes.scala | 48 +++--- .../scaladoc/snippets/SnippetChecker.scala | 3 +- tests/neg-macros/i13809/Macros_1.scala | 2 +- tests/neg/i12284.check | 50 ++---- tests/neg/i12284.scala | 11 +- tests/{neg => pos}/i864.scala | 0 tests/pos/i8900-cycle.scala | 15 ++ tests/pos/i8900-polyfunction.scala | 5 + tests/pos/i8900-promote.scala | 18 +++ tests/pos/i8900-unflip.scala | 18 +++ tests/pos/i8900a2.scala | 12 ++ tests/pos/i8900a3.scala | 13 ++ 21 files changed, 381 insertions(+), 128 deletions(-) rename tests/{neg => pos}/i864.scala (100%) create mode 100644 tests/pos/i8900-cycle.scala create mode 100644 tests/pos/i8900-polyfunction.scala create mode 100644 tests/pos/i8900-promote.scala create mode 100644 tests/pos/i8900-unflip.scala create mode 100644 tests/pos/i8900a2.scala create mode 100644 tests/pos/i8900a3.scala diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala b/compiler/src/dotty/tools/dotc/core/Constraint.scala index 32d23deea397..c35c93886cd8 100644 --- a/compiler/src/dotty/tools/dotc/core/Constraint.scala +++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala @@ -93,13 +93,15 @@ abstract class Constraint extends Showable { /** A constraint that includes the relationship `p1 <: p2`. * `<:` relationships between parameters ("edges") are propagated, but * non-parameter bounds are left alone. 
+ * + * @param direction Must be set to `KeepParam1` or `KeepParam2` when + * `p2 <: p1` is already true depending on which parameter + * the caller intends to keep. This will avoid propagating + * bounds that will be redundant after `p1` and `p2` are + * unified. */ - def addLess(p1: TypeParamRef, p2: TypeParamRef)(using Context): This - - /** A constraint resulting from adding p2 = p1 to this constraint, and at the same - * time transferring all bounds of p2 to p1 - */ - def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): This + def addLess(p1: TypeParamRef, p2: TypeParamRef, + direction: UnificationDirection = UnificationDirection.NoUnification)(using Context): This /** A new constraint which is derived from this constraint by removing * the type parameter `param` from the domain and replacing all top-level occurrences @@ -174,3 +176,15 @@ abstract class Constraint extends Showable { */ def checkConsistentVars()(using Context): Unit } + +/** When calling `Constraint#addLess(p1, p2, ...)`, the caller might end up + * unifying one parameter with the other, this enum lets `addLess` know which + * direction the unification will take. + */ +enum UnificationDirection: + /** Neither p1 nor p2 will be instantiated. */ + case NoUnification + /** `p2 := p1`, p1 left uninstantiated. */ + case KeepParam1 + /** `p1 := p2`, p2 left uninstantiated. 
*/ + case KeepParam2 diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 7df57fc40c10..835da2176a33 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -10,8 +10,10 @@ import Flags._ import config.Config import config.Printers.typr import reporting.trace -import typer.ProtoTypes.newTypeVar +import typer.ProtoTypes.{newTypeVar, representedParamRef} import StdNames.tpnme +import UnificationDirection.* +import NameKinds.AvoidNameKind /** Methods for adding constraints and solving them. * @@ -85,13 +87,39 @@ trait ConstraintHandling { case tv: TypeVar => tv.nestingLevel case _ => Int.MaxValue + /** If `param` is nested deeper than `maxLevel`, try to instantiate it to a + * fresh type variable of level `maxLevel` and return the new variable. + * If this isn't possible, throw a TypeError. + */ + def atLevel(maxLevel: Int, param: TypeParamRef)(using Context): TypeParamRef = + if nestingLevel(param) <= maxLevel then return param + LevelAvoidMap(0, maxLevel)(param) match + case freshVar: TypeVar => freshVar.origin + case _ => throw new TypeError( + i"Could not decrease the nesting level of ${param} from ${nestingLevel(param)} to $maxLevel in $constraint") + def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = constraint.nonParamBounds(param) + /** The full lower bound of `param` includes both the `nonParamBounds` and the + * params in the constraint known to be `<: param`, except that + * params with a `nestingLevel` higher than `param` will be instantiated + * to a fresh param at a legal level. See the documentation of `TypeVar` + * for details. 
+   */
   def fullLowerBound(param: TypeParamRef)(using Context): Type =
-    constraint.minLower(param).foldLeft(nonParamBounds(param).lo)(_ | _)
+    val maxLevel = nestingLevel(param)
+    var loParams = constraint.minLower(param)
+    if maxLevel != Int.MaxValue then
+      loParams = loParams.mapConserve(atLevel(maxLevel, _))
+    loParams.foldLeft(nonParamBounds(param).lo)(_ | _)
 
+  /** The full upper bound of `param`, see the documentation of `fullLowerBound` above. */
   def fullUpperBound(param: TypeParamRef)(using Context): Type =
-    constraint.minUpper(param).foldLeft(nonParamBounds(param).hi)(_ & _)
+    val maxLevel = nestingLevel(param)
+    var hiParams = constraint.minUpper(param)
+    if maxLevel != Int.MaxValue then
+      hiParams = hiParams.mapConserve(atLevel(maxLevel, _))
+    hiParams.foldLeft(nonParamBounds(param).hi)(_ & _)
 
   /** Full bounds of `param`, including other lower/upper params.
    *
@@ -116,10 +144,64 @@ trait ConstraintHandling {
     def toAvoid(tp: NamedType): Boolean =
       tp.prefix == NoPrefix && !tp.symbol.isStatic && !levelOK(tp.symbol.nestingLevel)
 
+    /** Return a (possibly fresh) type variable of a level no greater than `maxLevel` which is:
+     *  - lower-bounded by `tp` if variance >= 0
+     *  - upper-bounded by `tp` if variance <= 0
+     *  If this isn't possible, return the empty range.
+     */
+    def legalVar(tp: TypeVar): Type =
+      val oldParam = tp.origin
+      val nameKind =
+        if variance > 0 then AvoidNameKind.UpperBound
+        else if variance < 0 then AvoidNameKind.LowerBound
+        else AvoidNameKind.BothBounds
+
+      /** If it exists, return the first param in the list created in a previous call to `legalVar(tp)`
+ */ + def findParam(params: List[TypeParamRef]): Option[TypeParamRef] = + params.find(p => + nestingLevel(p) <= maxLevel && representedParamRef(p) == oldParam && + (p.paramName.is(AvoidNameKind.BothBounds) || + variance != 0 && p.paramName.is(nameKind))) + + // First, check if we can reuse an existing parameter, this is more than an optimization + // since it avoids an infinite loop in tests/pos/i8900-cycle.scala + findParam(constraint.lower(oldParam)).orElse(findParam(constraint.upper(oldParam))) match + case Some(param) => + constraint.typeVarOfParam(param) + case _ => + // Otherwise, try to return a fresh type variable at `maxLevel` with + // the appropriate constraints. + val name = nameKind(oldParam.paramName.toTermName).toTypeName + val freshVar = newTypeVar(TypeBounds.upper(tp.topType), name, + nestingLevel = maxLevel, represents = oldParam) + val ok = + if variance < 0 then + addLess(freshVar.origin, oldParam) + else if variance > 0 then + addLess(oldParam, freshVar.origin) + else + unify(freshVar.origin, oldParam) + if ok then freshVar else emptyRange + end legalVar + + override def apply(tp: Type): Type = tp match + case tp: TypeVar if !tp.isInstantiated && !levelOK(tp.nestingLevel) => + legalVar(tp) + // TypeParamRef can occur in tl bounds + case tp: TypeParamRef => + constraint.typeVarOfParam(tp) match + case tvar: TypeVar => + apply(tvar) + case _ => super.apply(tp) + case _ => + super.apply(tp) + override def mapWild(t: WildcardType) = if ctx.mode.is(Mode.TypevarsMissContext) then super.mapWild(t) else - val tvar = newTypeVar(apply(t.effectiveBounds).toBounds) + val tvar = newTypeVar(apply(t.effectiveBounds).toBounds, nestingLevel = maxLevel) tvar end LevelAvoidMap @@ -140,7 +222,16 @@ trait ConstraintHandling { // flip the variance to under-approximate. 
if necessaryConstraintsOnly then variance = -variance - val approx = LevelAvoidMap(variance, nestingLevel(param)) + val approx = new LevelAvoidMap(variance, nestingLevel(param)): + override def legalVar(tp: TypeVar): Type = + // `legalVar` will create a type variable whose bounds depend on + // `variance`, but whether the variance is positive or negative, + // we can still infer necessary constraints since just creating a + // type variable doesn't reduce the set of possible solutions. + // Therefore, we can safely "unflip" the variance flipped above. + // This is necessary for i8900-unflip.scala to typecheck. + val v = if necessaryConstraintsOnly then -this.variance else this.variance + atVariance(v)(super.legalVar(tp)) approx(rawBound) end legalBound @@ -246,19 +337,50 @@ trait ConstraintHandling { def location(using Context) = "" // i"in ${ctx.typerState.stateChainStr}" // use for debugging - /** Make p2 = p1, transfer all bounds of p2 to p1 - * @pre less(p1)(p2) + /** Unify p1 with p2: one parameter will be kept in the constraint, the + * other will be removed and its bounds transferred to the remaining one. + * + * If p1 and p2 have different `nestingLevel`, the parameter with the lowest + * level will be kept and the transferred bounds from the other parameter + * will be adjusted for level-correctness. 
*/ private def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { constr.println(s"unifying $p1 $p2") - assert(constraint.isLess(p1, p2)) - constraint = constraint.addLess(p2, p1) + if !constraint.isLess(p1, p2) then + constraint = constraint.addLess(p1, p2) + + val level1 = nestingLevel(p1) + val level2 = nestingLevel(p2) + val pKept = if level1 <= level2 then p1 else p2 + val pRemoved = if level1 <= level2 then p2 else p1 + + constraint = constraint.addLess(p2, p1, direction = if pKept eq p1 then KeepParam2 else KeepParam1) + + val boundKept = constraint.nonParamBounds(pKept).substParam(pRemoved, pKept) + var boundRemoved = constraint.nonParamBounds(pRemoved).substParam(pRemoved, pKept) + + if level1 != level2 then + boundRemoved = LevelAvoidMap(-1, math.min(level1, level2))(boundRemoved) + val TypeBounds(lo, hi) = boundRemoved + // After avoidance, the interval might be empty, e.g. in + // tests/pos/i8900-promote.scala: + // >: x.type <: Singleton + // becomes: + // >: Int <: Singleton + // In that case, we can still get a legal constraint + // by replacing the lower-bound to get: + // >: Int & Singleton <: Singleton + if !isSub(lo, hi) then + boundRemoved = TypeBounds(lo & hi, hi) + val down = constraint.exclusiveLower(p2, p1) val up = constraint.exclusiveUpper(p1, p2) - constraint = constraint.unify(p1, p2) - val bounds = constraint.nonParamBounds(p1) - val lo = bounds.lo - val hi = bounds.hi + + val newBounds = (boundKept & boundRemoved).bounds + constraint = constraint.updateEntry(pKept, newBounds).replace(pRemoved, pKept) + + val lo = newBounds.lo + val hi = newBounds.hi isSub(lo, hi) && down.forall(addOneBound(_, hi, isUpper = true)) && up.forall(addOneBound(_, lo, isUpper = false)) @@ -311,6 +433,7 @@ trait ConstraintHandling { final def approximation(param: TypeParamRef, fromBelow: Boolean)(using Context): Type = constraint.entry(param) match case entry: TypeBounds => + val maxLevel = nestingLevel(param) val useLowerBound = fromBelow 
|| param.occursIn(entry.hi) val inst = if useLowerBound then fullLowerBound(param) else fullUpperBound(param) typr.println(s"approx ${param.show}, from below = $fromBelow, inst = ${inst.show}") diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index 3e68e9546553..4ebb3ab43a46 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -11,6 +11,7 @@ import collection.mutable import printing._ import scala.annotation.internal.sharable +import scala.annotation.unused /** Represents GADT constraints currently in scope */ sealed abstract class GadtConstraint extends Showable { diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index b77f870b72b7..2b3806e48f79 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -358,6 +358,13 @@ object NameKinds { val ProtectedAccessorName: PrefixNameKind = new PrefixNameKind(PROTECTEDACCESSOR, "protected$") val InlineAccessorName: PrefixNameKind = new PrefixNameKind(INLINEACCESSOR, "inline$") + /** See `ConstraintHandling#LevelAvoidMap`. 
*/ + enum AvoidNameKind(tag: Int, prefix: String) extends PrefixNameKind(tag, prefix): + override def definesNewName = true + case UpperBound extends AvoidNameKind(AVOIDUPPER, "(upper)") + case LowerBound extends AvoidNameKind(AVOIDLOWER, "(lower)") + case BothBounds extends AvoidNameKind(AVOIDBOTH, "(avoid)") + val BodyRetainerName: SuffixNameKind = new SuffixNameKind(BODYRETAINER, "$retainedBody") val FieldName: SuffixNameKind = new SuffixNameKind(FIELD, "$$local") { override def mkString(underlying: TermName, info: ThisInfo) = underlying.toString diff --git a/compiler/src/dotty/tools/dotc/core/NameTags.scala b/compiler/src/dotty/tools/dotc/core/NameTags.scala index 7ee7758a506b..5628adbd6f89 100644 --- a/compiler/src/dotty/tools/dotc/core/NameTags.scala +++ b/compiler/src/dotty/tools/dotc/core/NameTags.scala @@ -32,6 +32,11 @@ object NameTags extends TastyFormat.NameTags { inline val SETTER = 34 // A synthesized += suffix. + // Name of type variables created by `ConstraintHandling#LevelAvoidMap`. 
+ final val AVOIDUPPER = 35 + final val AVOIDLOWER = 36 + final val AVOIDBOTH = 37 + def nameTagToString(tag: Int): String = tag match { case UTF8 => "UTF8" case QUALIFIED => "QUALIFIED" diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index aa4262ad6e02..1f83224cc3e7 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -134,6 +134,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, private val lowerMap : ParamOrdering, private val upperMap : ParamOrdering) extends Constraint { + import UnificationDirection.* + type This = OrderingConstraint // ----------- Basic indices -------------------------------------------------- @@ -350,29 +352,37 @@ class OrderingConstraint(private val boundsMap: ParamBounds, /** Add the fact `param1 <: param2` to the constraint `current` and propagate * `<:<` relationships between parameters ("edges") but not bounds. */ - private def order(current: This, param1: TypeParamRef, param2: TypeParamRef)(using Context): This = + def order(current: This, param1: TypeParamRef, param2: TypeParamRef, direction: UnificationDirection = NoUnification)(using Context): This = if (param1 == param2 || current.isLess(param1, param2)) this else { assert(contains(param1), i"$param1") assert(contains(param2), i"$param2") - // Is `order` called during parameter unification? - val unifying = isLess(param2, param1) + val unifying = direction != NoUnification val newUpper = { val up = exclusiveUpper(param2, param1) if unifying then // Since param2 <:< param1 already holds now, filter out param1 to avoid adding // duplicated orderings. - param2 :: up.filterNot(_ eq param1) + val filtered = up.filterNot(_ eq param1) + // Only add bounds for param2 if it will be kept in the constraint after unification. 
+ if direction == KeepParam2 then + param2 :: filtered + else + filtered else param2 :: up } val newLower = { val lower = exclusiveLower(param1, param2) if unifying then - // Do not add bounds for param1 since it will be unified to param2 soon. - // And, similarly filter out param2 from lowerly-ordered parameters + // Similarly, filter out param2 from lowerly-ordered parameters // to avoid duplicated orderings. - lower.filterNot(_ eq param2) + val filtered = lower.filterNot(_ eq param2) + // Only add bounds for param1 if it will be kept in the constraint after unification. + if direction == KeepParam1 then + param1 :: filtered + else + filtered else param1 :: lower } @@ -416,14 +426,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def updateEntry(param: TypeParamRef, tp: Type)(using Context): This = updateEntry(this, param, ensureNonCyclic(param, tp)).checkNonCyclic() - def addLess(param1: TypeParamRef, param2: TypeParamRef)(using Context): This = - order(this, param1, param2).checkNonCyclic() - - def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): This = - val bound1 = nonParamBounds(p1).substParam(p2, p1) - val bound2 = nonParamBounds(p2).substParam(p2, p1) - val p1Bounds = bound1 & bound2 - updateEntry(p1, p1Bounds).replace(p2, p1) + def addLess(param1: TypeParamRef, param2: TypeParamRef, direction: UnificationDirection)(using Context): This = + order(this, param1, param2, direction).checkNonCyclic() // ---------- Replacements and Removals ------------------------------------- diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index bc7970d423f5..95ad0b95b335 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -231,6 +231,16 @@ class TypeApplications(val self: Type) extends AnyVal { (alias ne self) && alias.hasSimpleKind } + /** The top type with the same kind as `self`. 
*/ + def topType(using Context): Type = + if self.hasSimpleKind then + defn.AnyType + else EtaExpand(self.typeParams) match + case tp: HKTypeLambda => + tp.derivedLambdaType(resType = tp.resultType.topType) + case _ => + defn.AnyKindType + /** If self type is higher-kinded, its result type, otherwise NoType. * Note: The hkResult of an any-kinded type is again AnyKind. */ diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 68e2dc92303d..2d02546a6d5e 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4666,6 +4666,15 @@ object Types { * * @param origin The parameter that's tracked by the type variable. * @param creatorState The typer state in which the variable was created. + * @param nestingLevel Symbols with a nestingLevel strictly greater than this + * will not appear in the instantiation of this type variable. + * This is enforced in `ConstraintHandling` by: + * - Maintaining the invariant that the `nonParamBounds` + * of a type variable never refer to a type with a + * greater `nestingLevel` (see `legalBound` for the reason + * why this cannot be delayed until instantiation). + * - On instantiation, replacing any param in the param bound + * with a level greater than nestingLevel (see `fullLowerBound`). 
*/ final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState, val nestingLevel: Int) extends CachedProxyType with ValueType { private var currentOrigin = initOrigin @@ -4774,8 +4783,8 @@ object Types { } } object TypeVar: - def apply(initOrigin: TypeParamRef, creatorState: TyperState)(using Context) = - new TypeVar(initOrigin, creatorState, ctx.nestingLevel) + def apply(using Context)(initOrigin: TypeParamRef, creatorState: TyperState, nestingLevel: Int = ctx.nestingLevel) = + new TypeVar(initOrigin, creatorState, nestingLevel) type TypeVars = SimpleIdentitySet[TypeVar] diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 426543879103..17df9c93f9a9 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -7,7 +7,7 @@ import ast._ import Contexts._, Types._, Flags._, Symbols._ import Trees._ import ProtoTypes._ -import NameKinds.UniqueName +import NameKinds.{AvoidNameKind, UniqueName} import util.Spans._ import util.{Stats, SimpleIdentityMap} import Decorators._ @@ -553,7 +553,8 @@ trait Inferencing { this: Typer => * @param locked the set of type variables of the current typer state that cannot be interpolated * at the present time * Eligible for interpolation are all type variables owned by the current typerstate - * that are not in locked. Type variables occurring co- (respectively, contra-) variantly in the tree type + * that are not in `locked` and whose `nestingLevel` is `>= ctx.nestingLevel`. + * Type variables occurring co- (respectively, contra-) variantly in the tree type * or expected type are minimized (respectvely, maximized). Non occurring type variables are minimized if they * have a lower bound different from Nothing, maximized otherwise. Type variables appearing * non-variantly in the type are left untouched. 
@@ -615,17 +616,35 @@ trait Inferencing { this: Typer => type InstantiateQueue = mutable.ListBuffer[(TypeVar, Boolean)] val toInstantiate = new InstantiateQueue for tvar <- qualifying do - if !tvar.isInstantiated && constraint.contains(tvar) then + if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then constrainIfDependentParamRef(tvar, tree) // Needs to be checked again, since previous interpolations could already have // instantiated `tvar` through unification. val v = vs(tvar) if v == null then - typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = ${tvar.hasLowerBound}, $constraint") - toInstantiate += ((tvar, tvar.hasLowerBound)) + // Even though `tvar` is non-occurring in `v`, the specific + // instantiation we pick still matters because `tvar` might appear + // in the bounds of a non-`qualifying` type variable in the + // constraint. + // In particular, if `tvar` was created as the upper or lower + // bound of an existing variable by `LevelAvoidMap`, we + // instantiate it in the direction corresponding to the + // original variable which might be further constrained later. + // Otherwise, we simply rely on `hasLowerBound`. + val name = tvar.origin.paramName + val fromBelow = + name.is(AvoidNameKind.UpperBound) || + !name.is(AvoidNameKind.LowerBound) && tvar.hasLowerBound + typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") + toInstantiate += ((tvar, fromBelow)) else if v.intValue != 0 then typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint") toInstantiate += ((tvar, v.intValue == 1)) + else if tvar.nestingLevel > ctx.nestingLevel then + // Invariant: a type variable of level N can only appear + // in the type of a tree whose enclosing scope is level <= N. 
+ typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") + comparing(_.atLevel(ctx.nestingLevel, tvar.origin)) else typr.println(i"no interpolation for nonvariant $tvar in $state") @@ -687,26 +706,26 @@ trait Inferencing { this: Typer => * type if necessary to make it a Singleton. */ private def constrainIfDependentParamRef(tvar: TypeVar, call: Tree)(using Context): Unit = - representedParamRef(tvar) match - case ref: TermParamRef => - - def findArg(tree: Tree)(using Context): Tree = tree match - case Apply(fn, args) => - if fn.tpe.widen eq ref.binder then - if ref.paramNum < args.length then args(ref.paramNum) - else EmptyTree - else findArg(fn) - case TypeApply(fn, _) => findArg(fn) - case Block(_, expr) => findArg(expr) - case Inlined(_, _, expr) => findArg(expr) - case _ => EmptyTree - - val arg = findArg(call) - if !arg.isEmpty then - var argType = arg.tpe.widenIfUnstable - if !argType.isSingleton then argType = SkolemType(argType) - argType <:< tvar - case _ => + if tvar.origin.paramName.is(NameKinds.DepParamName) then + representedParamRef(tvar.origin) match + case ref: TermParamRef => + def findArg(tree: Tree)(using Context): Tree = tree match + case Apply(fn, args) => + if fn.tpe.widen eq ref.binder then + if ref.paramNum < args.length then args(ref.paramNum) + else EmptyTree + else findArg(fn) + case TypeApply(fn, _) => findArg(fn) + case Block(_, expr) => findArg(expr) + case Inlined(_, _, expr) => findArg(expr) + case _ => EmptyTree + + val arg = findArg(call) + if !arg.isEmpty then + var argType = arg.tpe.widenIfUnstable + if !argType.isSingleton then argType = SkolemType(argType) + argType <:< tvar + case _ => end constrainIfDependentParamRef } diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index fcedd8a0cd56..800af19e9f78 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ 
b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
@@ -628,7 +628,11 @@ object ProtoTypes {
    *                    for each parameter.
    *  @return The added type lambda, and the list of created type variables.
    */
-  def constrained(tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean)(using Context): (TypeLambda, List[TypeTree]) = {
+  def constrained(using Context)(
+    tl: TypeLambda, owningTree: untpd.Tree,
+    alwaysAddTypeVars: Boolean,
+    nestingLevel: Int = ctx.nestingLevel
+  ): (TypeLambda, List[TypeTree]) = {
     val state = ctx.typerState
     val addTypeVars = alwaysAddTypeVars || !owningTree.isEmpty
     if (tl.isInstanceOf[PolyType])
@@ -640,7 +644,7 @@ object ProtoTypes {
       for (paramRef <- tl.paramRefs)
       yield {
         val tt = InferredTypeTree().withSpan(owningTree.span)
-        val tvar = TypeVar(paramRef, state)
+        val tvar = TypeVar(paramRef, state, nestingLevel)
         state.ownedVars += tvar
         tt.withType(tvar)
       }
@@ -664,36 +668,42 @@ object ProtoTypes {
     val targs = constrained(tl, ast.tpd.EmptyTree, alwaysAddTypeVars = true)._2
     tl.instantiate(targs.tpes)
 
-  /** A new type variable with given bounds for its origin.
-   *  @param represents If exists, the TermParamRef that the TypeVar represents
-   *                    in the substitution generated by `resultTypeApprox`
+  /** A fresh type variable added to the current constraint.
+   *  @param bounds       The initial bounds of the variable
+   *  @param name         The name of the variable, defaults to a fresh `DepParamName`
+   *  @param nestingLevel See `TypeVar#nestingLevel`
+   *  @param represents   If it exists, a ParamRef that this TypeVar represents,
+   *                      to be retrieved using `representedParamRef`.
+   *                    in the substitution generated by `resultTypeApprox`
    *  If `represents` exists, it is stored in the result type of the PolyType
    *  that backs the TypeVar, to be retrieved by `representedParamRef`.
*/ - def newTypeVar(bounds: TypeBounds, represents: Type = NoType)(using Context): TypeVar = { - val poly = PolyType(DepParamName.fresh().toTypeName :: Nil)( + def newTypeVar(using Context)( + bounds: TypeBounds, name: TypeName = DepParamName.fresh().toTypeName, + nestingLevel: Int = ctx.nestingLevel, represents: Type = NoType): TypeVar = + val poly = PolyType(name :: Nil)( pt => bounds :: Nil, pt => represents.orElse(defn.AnyType)) - constrained(poly, untpd.EmptyTree, alwaysAddTypeVars = true) + constrained(poly, untpd.EmptyTree, alwaysAddTypeVars = true, nestingLevel) ._2.head.tpe.asInstanceOf[TypeVar] - } - /** If `tvar` represents a parameter of a dependent function generated - * by `newDepVar` called from `resultTypeApprox, the term parameter reference - * for which the variable was substituted. Otherwise, NoType. + /** If `param` was created using `newTypeVar(..., represents = X)`, returns X. + * This is used in: + * - `Inferencing#constrainIfDependentParamRef` to retrieve the dependent function + * parameter for which the variable was substituted. + * - `ConstraintHandling#LevelAvoidMap#legalVar` to retrieve the type variable that was + * avoided in a previous call to `legalVar`. */ - def representedParamRef(tvar: TypeVar)(using Context): Type = - if tvar.origin.paramName.is(DepParamName) then - tvar.origin.binder.resultType match - case ref: TermParamRef => ref - case _ => NoType - else NoType + def representedParamRef(param: TypeParamRef)(using Context): Type = + param.binder.resultType match + case ref: ParamRef => ref + case _ => NoType /** Create a new TypeVar that represents a dependent method parameter singleton `ref` */ def newDepTypeVar(ref: TermParamRef)(using Context): TypeVar = newTypeVar( TypeBounds.upper(AndType(ref.underlying.widenExpr, defn.SingletonClass.typeRef)), - ref) + represents = ref) /** The result type of `mt`, where all references to parameters of `mt` are * replaced by either wildcards or TypeParamRefs. 
diff --git a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetChecker.scala b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetChecker.scala index 906c0d2434b0..77ab539d34fa 100644 --- a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetChecker.scala +++ b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetChecker.scala @@ -25,7 +25,8 @@ class SnippetChecker(val args: Scaladoc.Args)(using cctx: CompilerContext): args.classpath ).mkString(sep) - private val snippetCompilerSettings: Seq[SnippetCompilerSetting[_]] = cctx.settings.userSetSettings(cctx.settingsState).filter(_ != cctx.settings.classpath).map( s => + private val snippetCompilerSettings: Seq[SnippetCompilerSetting[?]] = cctx.settings.userSetSettings(cctx.settingsState).filter(_ != cctx.settings.classpath) + .map[SnippetCompilerSetting[?]]( s => SnippetCompilerSetting(s, s.valueIn(cctx.settingsState)) ) :+ SnippetCompilerSetting(cctx.settings.classpath, fullClasspath) diff --git a/tests/neg-macros/i13809/Macros_1.scala b/tests/neg-macros/i13809/Macros_1.scala index 774a8bf195ba..3c39a40a7c51 100644 --- a/tests/neg-macros/i13809/Macros_1.scala +++ b/tests/neg-macros/i13809/Macros_1.scala @@ -221,7 +221,7 @@ object Async: }).transformed ) case _ => ??? 
case Block(prevs,last) => - val rPrevs = prevs.map{ p => + val rPrevs = prevs.map[CpsExpr[?]]{ p => p match case v@ValDef(vName,vtt,optRhs) => optRhs.get.tpe.widen.asType match diff --git a/tests/neg/i12284.check b/tests/neg/i12284.check index 2f3adee6e948..00e445d3c41c 100644 --- a/tests/neg/i12284.check +++ b/tests/neg/i12284.check @@ -1,41 +1,9 @@ --- Error: tests/neg/i12284.scala:5:36 ---------------------------------------------------------------------------------- -5 | val xx: Vector[F[_]] = deps.map(i => magic(i)) // error // error // error - | ^^^^^^^^^^^^^ - | return type B of lambda cannot be made hygienic; - | it is not a supertype of the hygienic type Any --- Error: tests/neg/i12284.scala:5:33 ---------------------------------------------------------------------------------- -5 | val xx: Vector[F[_]] = deps.map(i => magic(i)) // error // error // error - | ^ - | Inaccessible variables captured in instantation of type variable B. - | F[i.A] was fixed to Any, - | but the latter type does not conform to the upper bound F[?] --- Error: tests/neg/i12284.scala:5:6 ----------------------------------------------------------------------------------- -5 | val xx: Vector[F[_]] = deps.map(i => magic(i)) // error // error // error - | ^ - | Inaccessible variables captured in instantation of type variable B. - | F[i.A] was fixed to Any, - | but the latter type does not conform to the upper bound F[?] --- Error: tests/neg/i12284.scala:4:2 ----------------------------------------------------------------------------------- -4 | val deps: Vector[I[F, _]] = ??? // error - | ^ - | Inaccessible variables captured in instantation of type variable B. - | F[i.A] was fixed to Any, - | but the latter type does not conform to the upper bound F[?] 
--- Error: tests/neg/i12284.scala:3:4 ----------------------------------------------------------------------------------- -3 |def magic[F[_], A](in: I[F, A]): F[A] = // error // error - | ^ - | Inaccessible variables captured in instantation of type variable B. - | F[i.A] was fixed to Any, - | but the latter type does not conform to the upper bound F[?] --- Error: tests/neg/i12284.scala:3:0 ----------------------------------------------------------------------------------- -3 |def magic[F[_], A](in: I[F, A]): F[A] = // error // error - |^ - |Inaccessible variables captured in instantation of type variable B. - |F[i.A] was fixed to Any, - |but the latter type does not conform to the upper bound F[?] --- Error: tests/neg/i12284.scala:1:0 ----------------------------------------------------------------------------------- -1 |trait I[F[_], A] // error - |^ - |Inaccessible variables captured in instantation of type variable B. - |F[i.A] was fixed to Any, - |but the latter type does not conform to the upper bound F[?] +-- [E007] Type Mismatch Error: tests/neg/i12284.scala:6:26 ------------------------------------------------------------- +6 | val y: Vector[F[Any]] = xx // error + | ^^ + | Found: (xx : Vector[Any]) + | Required: Vector[F[Any]] + | + | where: F is a type in method magic with bounds <: [_] =>> Any + +longer explanation available when compiling with `-explain` diff --git a/tests/neg/i12284.scala b/tests/neg/i12284.scala index 302ed6468f1f..11635a1638cb 100644 --- a/tests/neg/i12284.scala +++ b/tests/neg/i12284.scala @@ -1,6 +1,7 @@ -trait I[F[_], A] // error +trait I[F[_], A] -def magic[F[_], A](in: I[F, A]): F[A] = // error // error - val deps: Vector[I[F, _]] = ??? // error - val xx: Vector[F[_]] = deps.map(i => magic(i)) // error // error // error - ??? \ No newline at end of file +def magic[F[_], A](in: I[F, A]): F[A] = + val deps: Vector[I[F, _]] = ??? + val xx = deps.map(i => magic(i)) + val y: Vector[F[Any]] = xx // error + ??? 
diff --git a/tests/neg/i864.scala b/tests/pos/i864.scala similarity index 100% rename from tests/neg/i864.scala rename to tests/pos/i864.scala diff --git a/tests/pos/i8900-cycle.scala b/tests/pos/i8900-cycle.scala new file mode 100644 index 000000000000..3b6ae214cb42 --- /dev/null +++ b/tests/pos/i8900-cycle.scala @@ -0,0 +1,15 @@ +trait Contra[-A] +trait Cov[+B] + +trait Test { + def foo[S](x: S): S + def rec1[T <: Cov[T]]: Contra[T] + def rec2[T <: Cov[U], U <: T]: Contra[T] + + val a = foo({ + rec1 + }) + val b = foo({ + rec2 + }) +} diff --git a/tests/pos/i8900-polyfunction.scala b/tests/pos/i8900-polyfunction.scala new file mode 100644 index 000000000000..2836283b3c65 --- /dev/null +++ b/tests/pos/i8900-polyfunction.scala @@ -0,0 +1,5 @@ +object Test { + def f[F](f: [t] => t => F): Unit = () + + f([t] => (x: t) => x) +} diff --git a/tests/pos/i8900-promote.scala b/tests/pos/i8900-promote.scala new file mode 100644 index 000000000000..7d3a2ff96bed --- /dev/null +++ b/tests/pos/i8900-promote.scala @@ -0,0 +1,18 @@ +class Inv[A <: Singleton](x: A) +object Inv { + def empty[A <: Singleton]: Inv[A] = new Inv(???) +} + +class Inv2[A](x: A) +object Inv2 { + def empty[A]: Inv2[A] = new Inv2(???) +} + +object Test { + def inv(cond: Boolean) = // used to leak: Inv[x.type] + if (cond) + val x: Int = 1 + new Inv(x) + else + Inv.empty +} diff --git a/tests/pos/i8900-unflip.scala b/tests/pos/i8900-unflip.scala new file mode 100644 index 000000000000..568e9d22530e --- /dev/null +++ b/tests/pos/i8900-unflip.scala @@ -0,0 +1,18 @@ +// Minimized from PLens.scala in scalaz + +class PLensFamily[A1, A2, B1, B2] + +class LazyOption[A3] { + def fold[X](some: (=> A3) => X, none: => X): X = ??? +} +class IndexedStore[I, A4, B4](run: (A4 => B4, I)) + +object PL { + + def plensFamily[A1x, A2x, B1x, B2x](r: A1x => Option[IndexedStore[B1x, B2x, A2x]]): PLensFamily[A1x, A2x, B1x, B2x] = ??? + + def lazySome[T](a: => T): LazyOption[T] = ??? 
+ + def lazySomePLensFamily[A1y, A2y]: PLensFamily[LazyOption[A1y], LazyOption[A2y], A1y, A2y] = + plensFamily(_.fold(z => Some(IndexedStore(lazySome(_), z)), None)) +} diff --git a/tests/pos/i8900a2.scala b/tests/pos/i8900a2.scala new file mode 100644 index 000000000000..50a263ff7cc2 --- /dev/null +++ b/tests/pos/i8900a2.scala @@ -0,0 +1,12 @@ +class ContraCo[-T, +S](val t: S) +class CoContra[+T, -S](val t: T) +object Test { + def unwrap[Outer](inv: CoContra[Outer, Outer]): Outer = inv.t + def wrap[Inner](i: Inner): CoContra[Inner, Inner] = new CoContra(i) + + val a = unwrap({ + class Local + val local = new Local + wrap(local) + }) +} diff --git a/tests/pos/i8900a3.scala b/tests/pos/i8900a3.scala new file mode 100644 index 000000000000..d43f19fcaab6 --- /dev/null +++ b/tests/pos/i8900a3.scala @@ -0,0 +1,13 @@ +class ContraCo[-T, +S](val t: S) +class CoContra[+T, -S](val t: T) +object Test { + type Id[T] = T + def unwrap[Outer](inv: CoContra[Outer, Outer]): Outer = inv.t + def wrap[Inner](i: Inner): CoContra[Id[Inner], Id[Inner]] = new CoContra(i) + + val a = unwrap({ + class Local + val local = new Local + wrap(local) + }) +} From ae1b00d6235f394520ee7b5b062bada4d25a3422 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sun, 21 Nov 2021 19:13:55 +0100 Subject: [PATCH 10/11] Check that we pickle a definition before its references ... except for pattern-bound symbols because in `case x: List[t]` we pickle `List[t]` before we pickle `t` itself. The issue with t1957.scala is fixed (after staying as a TODO for 5 years!) by the level-checking improvements in the previous commits of this PR. 
--- compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index a195b157cacd..8f5910c3dd56 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -89,11 +89,6 @@ class TreePickler(pickler: TastyPickler) { case Some(label) => if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym) case None => - // See pos/t1957.scala for an example where this can happen. - // I believe it's a bug in typer: the type of an implicit argument refers - // to a closure parameter outside the closure itself. TODO: track this down, so that we - // can eliminate this case. - report.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.srcPos) pickleForwardSymRef(sym) } @@ -206,6 +201,8 @@ class TreePickler(pickler: TastyPickler) { } else if (tpe.prefix == NoPrefix) { writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect) + if !symRefs.contains(sym) && !sym.isPatternBound && !sym.hasAnnotation(defn.QuotedRuntimePatterns_patternTypeAnnot) then + report.error(i"pickling reference to as yet undefined $tpe with symbol ${sym}", sym.srcPos) pickleSymRef(sym) } else tpe.designator match { From 629006b3e5c456d5073e5eda2fa9c39ddc32a135 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sun, 21 Nov 2021 19:16:45 +0100 Subject: [PATCH 11/11] Remove now-unnecessary `avoid` when typing closures This is no longer needed thanks to the improvements to avoidance in this PR. 
--- compiler/src/dotty/tools/dotc/typer/Namer.scala | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index aa213d03b4e9..073ad0704640 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1479,22 +1479,7 @@ class Namer { typer: Typer => // This case applies if the closure result type contains uninstantiated // type variables. In this case, constrain the closure result from below // by the parameter-capture-avoiding type of the body. - val rhsType = typedAheadExpr(mdef.rhs, tpt.tpe).tpe - - // The following part is important since otherwise we might instantiate - // the closure result type with a plain functon type that refers - // to local parameters. An example where this happens in `dependent-closures.scala` - // If the code after `val rhsType` is commented out, this file fails pickling tests. - // AVOIDANCE TODO: Follow up why this happens, and whether there - // are better ways to achieve this. It would be good if we could get rid of this code. - // It seems at least partially redundant with the nesting level checking on TypeVar - // instantiation. - val hygienicType = TypeOps.avoid(rhsType, termParamss.flatten) - if (!hygienicType.isValueType || !(hygienicType <:< tpt.tpe)) - report.error(i"return type ${tpt.tpe} of lambda cannot be made hygienic;\n" + - i"it is not a supertype of the hygienic type $hygienicType", mdef.srcPos) - //println(i"lifting $rhsType over $termParamss -> $hygienicType = ${tpt.tpe}") - //println(TypeComparer.explained { implicit ctx => hygienicType <:< tpt.tpe }) + typedAheadExpr(mdef.rhs, tpt.tpe).tpe case _ => } WildcardType