Commit
Backport "fix typo" to 3.5.2 (#21480)
Backports #21324 to the 3.5.2 branch.

PR submitted by the release tooling.
[skip ci]
WojciechMazur authored Aug 28, 2024
2 parents 0492a30 + f612562 commit b474c9f
Showing 40 changed files with 45 additions and 45 deletions.
@@ -129,7 +129,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
if (file.isInstanceOf[JarArchive]) {
val jarCompressionLevel = compilerSettings.jarCompressionLevel
// Writing to non-empty JAR might be an undefined behaviour, e.g. in case if other files where
// created using `AbstractFile.bufferedOutputStream`instead of JarWritter
// created using `AbstractFile.bufferedOutputStream`instead of JarWriter
val jarFile = file.underlyingSource.getOrElse{
throw new IllegalStateException("No underlying source for jar")
}
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/CompilationUnit.scala
@@ -87,7 +87,7 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn
*/
val depRecorder: sbt.DependencyRecorder = sbt.DependencyRecorder()

/** Suspends the compilation unit by thowing a SuspendException
/** Suspends the compilation unit by throwing a SuspendException
* and recording the suspended compilation unit
*/
def suspend(hint: => String)(using Context): Nothing =
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/Compiler.scala
@@ -110,7 +110,7 @@ class Compiler {
new LetOverApply, // Lift blocks from receivers of applications
new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify.
List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements.
List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types
List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types
new PureStats, // Remove pure stats from blocks
new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations
new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference
@@ -32,7 +32,7 @@ abstract class TreeMapWithTrackedStats extends TreeMapWithImplicits:
case _ => tree
end updateTracked

/** Process a list of trees and give the priority to trakced trees */
/** Process a list of trees and give the priority to tracked trees */
private final def withUpdatedTrackedTrees(stats: List[Tree])(using Context) =
val trackedTrees = TreeMapWithTrackedStats.trackedTrees
stats.mapConserve:
@@ -67,7 +67,7 @@ end TreeMapWithTrackedStats
object TreeMapWithTrackedStats:
private val TrackedTrees = new Property.Key[mutable.Map[Symbol, tpd.MemberDef]]

/** Fetch the tracked trees in the cuurent context */
/** Fetch the tracked trees in the current context */
private def trackedTrees(using Context): mutable.Map[Symbol, MemberDef] =
ctx.property(TrackedTrees).get

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/cc/CaptureOps.scala
@@ -19,7 +19,7 @@ private val Captures: Key[CaptureSet] = Key()

object ccConfig:

/** If true, allow mappping capture set variables under captureChecking with maps that are neither
/** If true, allow mapping capture set variables under captureChecking with maps that are neither
* bijective nor idempotent. We currently do now know how to do this correctly in all
* cases, though.
*/
4 changes: 2 additions & 2 deletions compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala
@@ -38,7 +38,7 @@ object CheckCaptures:
/** A class describing environments.
* @param owner the current owner
* @param kind the environment's kind
* @param captured the caputure set containing all references to tracked free variables outside of boxes
* @param captured the capture set containing all references to tracked free variables outside of boxes
* @param outer0 the next enclosing environment
*/
case class Env(
@@ -460,7 +460,7 @@ class CheckCaptures extends Recheck, SymTransformer:
val meth = tree.fun.symbol
includeCallCaptures(meth, tree.srcPos)

// Unsafe box/unbox handlng, only for versions < 3.3
// Unsafe box/unbox handling, only for versions < 3.3
def mapArgUsing(f: Type => Type) =
val arg :: Nil = tree.args: @unchecked
val argType0 = f(recheckStart(arg, pt))
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/core/Annotations.scala
@@ -43,7 +43,7 @@ object Annotations {
def argumentConstantString(i: Int)(using Context): Option[String] =
for (case Constant(s: String) <- argumentConstant(i)) yield s

/** The tree evaluaton is in progress. */
/** The tree evaluation is in progress. */
def isEvaluating: Boolean = false

/** The tree evaluation has finished. */
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/core/Contexts.scala
@@ -265,7 +265,7 @@ object Contexts {
/** SourceFile with given path, memoized */
def getSource(path: String): SourceFile = getSource(path.toTermName)

/** AbstraFile with given path name, memoized */
/** AbstractFile with given path name, memoized */
def getFile(name: TermName): AbstractFile = base.files.get(name) match
case Some(file) =>
file
@@ -29,7 +29,7 @@ object CommentPickler:
def traverse(x: Any): Unit = x match
case x: untpd.Tree @unchecked =>
x match
case x: tpd.MemberDef @unchecked => // at this point all MembderDefs are t(y)p(e)d.
case x: tpd.MemberDef @unchecked => // at this point all MemberDefs are t(y)p(e)d.
for comment <- docString(x) do pickleComment(addrOfTree(x), comment)
case _ =>
val limit = x.productArity
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -684,7 +684,7 @@ object Scanners {
if !r.isOutermost
&& closingRegionTokens.contains(token)
&& !(token == CASE && r.prefix == MATCH)
&& next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.sala
&& next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.scala
=>
insert(OUTDENT, offset)
case _ =>
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/transform/CtxLazy.scala
@@ -11,7 +11,7 @@ import scala.compiletime.uninitialized
* with a different context.
*
* A typical use case is a lazy val in a phase object which exists once per root context where
* the expression intiializing the lazy val depends only on the root context, but not any changes afterwards.
* the expression initializing the lazy val depends only on the root context, but not any changes afterwards.
*/
class CtxLazy[T](expr: Context ?=> T) {
private var myValue: T = uninitialized
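The doc comment above describes CtxLazy's contract: the initializer needs a Context the first time it is forced, but the cached value must not change when later callers pass a different Context. A minimal, self-contained sketch of that idea (a hypothetical `CtxLazySketch` with `Context` stubbed out; not the compiler's actual implementation):

```scala
// Stand-in for the compiler's Context, only so the example is runnable.
class Context(val settingsVersion: Int)

// Compute `expr` at most once, under whichever context forces it first;
// later calls return the cached value and ignore their own context.
class CtxLazySketch[T](expr: Context ?=> T):
  private var cached: Option[T] = None
  def apply()(using Context): T =
    if cached.isEmpty then cached = Some(expr)
    cached.get

@main def ctxLazyDemo(): Unit =
  val lazySetting = CtxLazySketch(summon[Context].settingsVersion)
  println(lazySetting()(using Context(1))) // forces the initializer: prints 1
  println(lazySetting()(using Context(2))) // cached: still prints 1
```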
@@ -13,7 +13,7 @@ import NameKinds.SuperAccessorName

object ElimErasedValueType {
val name: String = "elimErasedValueType"
val description: String = "expand erased value types to their underlying implmementation types"
val description: String = "expand erased value types to their underlying implementation types"

def elimEVT(tp: Type)(using Context): Type = tp match {
case ErasedValueType(_, underlying) =>
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala
@@ -94,7 +94,7 @@ class ExpandSAMs extends MiniPhase:
* }
* ```
*
* is expanded to an anomymous class:
* is expanded to an anonymous class:
*
* ```
* val x: PartialFunction[A, B] = {
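The expanded form is cut off by the diff view at this point. As a rough, hand-written illustration of the shape such an expansion takes (analogous code only, not the phase's actual output), a partial-function literal behaves like an anonymous class implementing both `isDefinedAt` and `applyOrElse`:

```scala
import scala.runtime.AbstractPartialFunction

// Hand-written analogue of `val f: PartialFunction[Int, String] = { case 1 => "one" }`.
val f: PartialFunction[Int, String] =
  new AbstractPartialFunction[Int, String] {
    def isDefinedAt(x: Int): Boolean = x match
      case 1 => true
      case _ => false
    override def applyOrElse[A1 <: Int, B1 >: String](x: A1, default: A1 => B1): B1 = x match
      case 1 => "one"
      case _ => default(x)
  }

@main def pfDemo(): Unit =
  println(f.isDefinedAt(1))               // true
  println(f.applyOrElse(2, _ => "other")) // "other"
```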
4 changes: 2 additions & 2 deletions compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
@@ -101,7 +101,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase =>
val parentCls = parent.tpe.classSymbol.asClass
parent match
// if we are in a regular class and first parent is also a regular class,
// make sure we have a contructor
// make sure we have a constructor
case parent: TypeTree
if !cls.is(Trait) && !parentCls.is(Trait) && !defn.NotRuntimeClasses.contains(parentCls) =>
New(parent.tpe, Nil).withSpan(impl.span)
@@ -454,7 +454,7 @@ object ExplicitOuter {
val enclClass = ctx.owner.lexicallyEnclosingClass.asClass
val outerAcc = atPhaseNoLater(lambdaLiftPhase) {
// lambdalift mangles local class names, which means we cannot
// reliably find outer acessors anymore
// reliably find outer accessors anymore
tree match
case tree: This if tree.symbol == enclClass && !enclClass.is(Trait) =>
outerParamAccessor(enclClass)
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/transform/Pickler.scala
@@ -44,7 +44,7 @@ object Pickler {
*/
inline val ParallelPickling = true

/**A holder for syncronization points and reports when writing TASTy asynchronously.
/**A holder for synchronization points and reports when writing TASTy asynchronously.
* The callbacks should only be called once.
*/
class AsyncTastyHolder private (
@@ -51,7 +51,7 @@ trait ReifiedReflect:
.select(defn.Quotes_reflect_TypeApply_apply)
.appliedTo(fn, argTrees)

/** Create tree for `quotes.reflect.Assing(<lhs>, <rhs>)` */
/** Create tree for `quotes.reflect.Assign(<lhs>, <rhs>)` */
def Assign(lhs: Tree, rhs: Tree)(using Context) =
self.select(defn.Quotes_reflect_Assign)
.select(defn.Quotes_reflect_Assign_apply)
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/transform/SelectStatic.scala
@@ -15,7 +15,7 @@ import dotty.tools.dotc.transform.MegaPhase.*
* Otherwise, the backend needs to be aware that some qualifiers need to be
* dropped.
*
* A tranformation similar to what this phase does seems to be performed by
* A transformation similar to what this phase does seems to be performed by
* flatten in nsc.
*
* The side effects of the qualifier of a dropped `Select` is normally
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
@@ -455,7 +455,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
MirrorSource.reduce(mirroredType) match
case Right(msrc) => msrc match
case MirrorSource.Singleton(_, tref) =>
val singleton = tref.termSymbol // prefer alias name over the orignal name
val singleton = tref.termSymbol // prefer alias name over the original name
val singletonPath = tpd.singleton(tref).withSpan(span)
if tref.classSymbol.is(Scala2x) then // could be Scala 3 alias of Scala 2 case object.
val mirrorType = formal.constrained_& {
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/typer/Typer.scala
@@ -1269,7 +1269,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
* For example, both `@Annot(5)` and `@Annot({5, 6}) are viable calls of the constructor
* of annotation defined as `@interface Annot { int[] value() }`
* We assume that calling `typedNamedArg` in context of Java implies that we are dealing
* with annotation contructor, as named arguments are not allowed anywhere else in Java.
* with annotation constructor, as named arguments are not allowed anywhere else in Java.
* Under explicit nulls, the pt could be nullable. We need to strip `Null` type first.
*/
val arg1 = pt.stripNull() match {
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/io/FileWriters.scala
@@ -226,7 +226,7 @@ object FileWriters {
if (file.isInstanceOf[JarArchive]) {
val jarCompressionLevel = ctx.settings.jarCompressionLevel
// Writing to non-empty JAR might be an undefined behaviour, e.g. in case if other files where
// created using `AbstractFile.bufferedOutputStream`instead of JarWritter
// created using `AbstractFile.bufferedOutputStream`instead of JarWriter
val jarFile = file.underlyingSource.getOrElse{
throw new IllegalStateException("No underlying source for jar")
}
2 changes: 1 addition & 1 deletion docs/_docs/contributing/architecture/phases.md
@@ -63,7 +63,7 @@ Finally are [staging], which ensures that quotes conform to the
trees to embedded TASTy strings.

### `transformPhases`
These phases are concerned with tranformation into lower-level forms
These phases are concerned with transformation into lower-level forms
suitable for the runtime system, with two sub-groupings:
- High-level transformations: All phases from [firstTransform] to [erasure].
Most of these phases transform syntax trees, expanding high-level constructs
2 changes: 1 addition & 1 deletion docs/_docs/internals/overall-structure.md
@@ -160,7 +160,7 @@ phases. The current list of phases is specified in class [Compiler] as follows:
new LetOverApply, // Lift blocks from receivers of applications
new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify.
List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements.
List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types
List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types
new PureStats, // Remove pure stats from blocks
new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations
new ArrayApply, // Optimize `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]`
@@ -583,7 +583,7 @@ class CompletionArgSuite extends BaseCompletionSuite:
|""".stripMargin
)

@Test def `contructor-param` =
@Test def `constructor-param` =
check(
"""|class Foo (xxx: Int)
|
@@ -595,7 +595,7 @@
|""".stripMargin
)

@Test def `contructor-param2` =
@Test def `constructor-param2` =
check(
"""|class Foo ()
|
2 changes: 1 addition & 1 deletion scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala
@@ -48,7 +48,7 @@ trait BasicSupport:
"scala.transient",
"scala.volatile",
"scala.annotation.experimental",
"scala.annotation.contructorOnly",
"scala.annotation.constructorOnly",
"scala.annotation.static",
"scala.annotation.targetName",
"scala.annotation.threadUnsafe",
@@ -146,7 +146,7 @@ abstract class TreeInterpreter[Q <: Quotes & Singleton](using val q: Q) {
}

case Assign(lhs, rhs) =>
log("<interpretAssing>", tree)(localValue(lhs.symbol).update(eval(rhs)))
log("<interpretAssign>", tree)(localValue(lhs.symbol).update(eval(rhs)))

case If(cond, thenp, elsep) => log("interpretIf", tree)(interpretIf(cond, thenp, elsep))
case While(cond, body) => log("interpretWhile", tree)(interpretWhile(cond, body))
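For orientation, the hunk above comes from scaladoc's tree interpreter, which dispatches on tree shapes and logs each case. A toy version of the same match-and-log dispatch over a small hand-rolled AST (every name below is illustrative, not the real interpreter's API):

```scala
import scala.collection.mutable

// Toy AST mirroring the Assign / If / While cases handled above.
enum Expr:
  case Num(value: Int)
  case Ref(name: String)
  case Assign(name: String, rhs: Expr)
  case If(cond: Expr, thenp: Expr, elsep: Expr)
  case While(cond: Expr, body: Expr)

object ToyInterpreter:
  import Expr.*
  private val env = mutable.Map.empty[String, Int]

  // Log which case fired, then return the (lazily evaluated) result.
  private def log[T](label: String, e: Expr)(result: => T): T =
    println(s"$label: $e")
    result

  def eval(e: Expr): Int = e match
    case Num(v)            => v
    case Ref(name)         => env.getOrElse(name, 0)
    case Assign(name, rhs) => log("interpretAssign", e) { env(name) = eval(rhs); 0 }
    case If(c, t, el)      => log("interpretIf", e)(if eval(c) != 0 then eval(t) else eval(el))
    case While(c, body) =>
      log("interpretWhile", e) {
        while eval(c) != 0 do eval(body)
        0
      }

@main def toyInterpreterDemo(): Unit =
  import Expr.*
  println(ToyInterpreter.eval(Assign("x", Num(3))))          // logs interpretAssign, prints 0
  println(ToyInterpreter.eval(If(Ref("x"), Num(1), Num(0)))) // x is non-zero: prints 1
```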
2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/CompilationUnit.scala
@@ -66,7 +66,7 @@ class CompilationUnit protected (val source: SourceFile) {
/** Can this compilation unit be suspended */
def isSuspendable: Boolean = true

/** Suspends the compilation unit by thowing a SuspendException
/** Suspends the compilation unit by throwing a SuspendException
* and recording the suspended compilation unit
*/
def suspend()(using Context): Nothing =
2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/Compiler.scala
@@ -107,7 +107,7 @@ class Compiler {
new LetOverApply, // Lift blocks from receivers of applications
new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify.
List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements.
List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types
List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types
new PureStats, // Remove pure stats from blocks
new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations
new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference
2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/config/Config.scala
@@ -248,7 +248,7 @@ object Config {
*/
inline val printCaptureSetsAsPrefix = true

/** If true, allow mappping capture set variables under captureChecking with maps that are neither
/** If true, allow mapping capture set variables under captureChecking with maps that are neither
* bijective nor idempotent. We currently do now know how to do this correctly in all
* cases, though.
*/
2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/core/Contexts.scala
@@ -252,7 +252,7 @@ object Contexts {
/** SourceFile with given path, memoized */
def getSource(path: String): SourceFile = getSource(path.toTermName)

/** AbstraFile with given path name, memoized */
/** AbstractFile with given path name, memoized */
def getFile(name: TermName): AbstractFile = base.files.get(name) match
case Some(file) =>
file
@@ -27,7 +27,7 @@ class CommentPickler(pickler: TastyPickler, addrOfTree: tpd.Tree => Addr, docStr
private def traverse(x: Any): Unit = x match
case x: untpd.Tree @unchecked =>
x match
case x: tpd.MemberDef @unchecked => // at this point all MembderDefs are t(y)p(e)d.
case x: tpd.MemberDef @unchecked => // at this point all MemberDefs are t(y)p(e)d.
for comment <- docString(x) do pickleComment(addrOfTree(x), comment)
case _ =>
val limit = x.productArity
2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala
@@ -667,7 +667,7 @@ object Scanners {
if !r.isOutermost
&& closingRegionTokens.contains(token)
&& !(token == CASE && r.prefix == MATCH)
&& next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.sala
&& next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.scala
=>
insert(OUTDENT, offset)
case _ =>
2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/transform/CtxLazy.scala
@@ -9,7 +9,7 @@ import core.Contexts._
* with a different context.
*
* A typical use case is a lazy val in a phase object which exists once per root context where
* the expression intiializing the lazy val depends only on the root context, but not any changes afterwards.
* the expression initializing the lazy val depends only on the root context, but not any changes afterwards.
*/
class CtxLazy[T](expr: Context ?=> T) {
private var myValue: T = _
@@ -13,7 +13,7 @@ import NameKinds.SuperAccessorName

object ElimErasedValueType {
val name: String = "elimErasedValueType"
val description: String = "expand erased value types to their underlying implmementation types"
val description: String = "expand erased value types to their underlying implementation types"

def elimEVT(tp: Type)(using Context): Type = tp match {
case ErasedValueType(_, underlying) =>
2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/transform/ExpandSAMs.scala
@@ -88,7 +88,7 @@ class ExpandSAMs extends MiniPhase:
* }
* ```
*
* is expanded to an anomymous class:
* is expanded to an anonymous class:
*
* ```
* val x: PartialFunction[A, B] = {
4 changes: 2 additions & 2 deletions tests/pos-with-compiler-cc/dotc/transform/ExplicitOuter.scala
@@ -100,7 +100,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase =>
val parentCls = parent.tpe.classSymbol.asClass
parent match
// if we are in a regular class and first parent is also a regular class,
// make sure we have a contructor
// make sure we have a constructor
case parent: TypeTree
if !cls.is(Trait) && !parentCls.is(Trait) && !defn.NotRuntimeClasses.contains(parentCls) =>
New(parent.tpe, Nil).withSpan(impl.span)
@@ -459,7 +459,7 @@ object ExplicitOuter {
val enclClass = ctx.owner.lexicallyEnclosingClass.asClass
val outerAcc = atPhaseNoLater(lambdaLiftPhase) {
// lambdalift mangles local class names, which means we cannot
// reliably find outer acessors anymore
// reliably find outer accessors anymore
tree match
case tree: This if tree.symbol == enclClass && !enclClass.is(Trait) =>
outerParamAccessor(enclClass)
@@ -52,7 +52,7 @@ trait ReifiedReflect:
.select(defn.Quotes_reflect_TypeApply_apply)
.appliedTo(fn, argTrees)

/** Create tree for `quotes.reflect.Assing(<lhs>, <rhs>)` */
/** Create tree for `quotes.reflect.Assign(<lhs>, <rhs>)` */
def Assign(lhs: Tree, rhs: Tree)(using Context) =
self.select(defn.Quotes_reflect_Assign)
.select(defn.Quotes_reflect_Assign_apply)
@@ -15,7 +15,7 @@ import dotty.tools.dotc.transform.SymUtils._
* Otherwise, the backend needs to be aware that some qualifiers need to be
* dropped.
*
* A tranformation similar to what this phase does seems to be performed by
* A transformation similar to what this phase does seems to be performed by
* flatten in nsc.
*
* The side effects of the qualifier of a dropped `Select` is normally
2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala
@@ -431,7 +431,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
MirrorSource.reduce(mirroredType) match
case Right(msrc) => msrc match
case MirrorSource.Singleton(_, tref) =>
val singleton = tref.termSymbol // prefer alias name over the orignal name
val singleton = tref.termSymbol // prefer alias name over the original name
val singletonPath = pathFor(tref).withSpan(span)
if tref.classSymbol.is(Scala2x) then // could be Scala 3 alias of Scala 2 case object.
val mirrorType = formal.constrained_& {
0 comments on commit b474c9f