diff --git a/project/Mima.scala b/project/Mima.scala
index f7a6fec96..4186318f4 100644
--- a/project/Mima.scala
+++ b/project/Mima.scala
@@ -24,11 +24,14 @@ object Mima {
       ProblemFilters.exclude[DirectMissingMethodProblem](
         "scalafix.rule.RuleCtx.printLintMessage"),
       ProblemFilters.exclude[ReversedMissingMethodProblem](
-        "scalafix.rule.RuleCtx.filterLintMessage"
-      ),
+        "scalafix.rule.RuleCtx.filterLintMessage"),
       ProblemFilters.exclude[DirectMissingMethodProblem](
-        "scalafix.cli.CliRunner.this"
-      )
+        "scalafix.cli.CliRunner.this"),
+      ProblemFilters.exclude[FinalClassProblem]("scalafix.util.TokenList"),
+      ProblemFilters.exclude[ReversedMissingMethodProblem](
+        "scalafix.util.TokenList.leadingSpaces"),
+      ProblemFilters.exclude[ReversedMissingMethodProblem](
+        "scalafix.util.TokenList.trailingSpaces")
     )
   }
 }
diff --git a/scalafix-core/shared/src/main/scala/scalafix/internal/rule/RuleCtxImpl.scala b/scalafix-core/shared/src/main/scala/scalafix/internal/rule/RuleCtxImpl.scala
index 2ae45f29e..b2de9043f 100644
--- a/scalafix-core/shared/src/main/scala/scalafix/internal/rule/RuleCtxImpl.scala
+++ b/scalafix-core/shared/src/main/scala/scalafix/internal/rule/RuleCtxImpl.scala
@@ -32,7 +32,7 @@ case class RuleCtxImpl(tree: Tree, config: ScalafixConfig) extends RuleCtx {
   override def toString: String = syntax
   def toks(t: Tree): Tokens = t.tokens(config.dialect)
   lazy val tokens: Tokens = tree.tokens(config.dialect)
-  lazy val tokenList: TokenList = new TokenList(tokens)
+  lazy val tokenList: TokenList = TokenList(tokens)
   lazy val matchingParens: MatchingParens = MatchingParens(tokens)
   lazy val comments: AssociatedComments = AssociatedComments(tokens)
   lazy val input: Input = tokens.head.input
diff --git a/scalafix-core/shared/src/main/scala/scalafix/util/TokenList.scala b/scalafix-core/shared/src/main/scala/scalafix/util/TokenList.scala
index 583f93eda..0f8a62593 100644
--- a/scalafix-core/shared/src/main/scala/scalafix/util/TokenList.scala
+++ b/scalafix-core/shared/src/main/scala/scalafix/util/TokenList.scala
@@ -6,7 +6,7 @@ import scala.meta.tokens.Token
 import scala.meta.tokens.Tokens
 
 /** Helper to traverse tokens as a doubly linked list. */
-class TokenList(tokens: Tokens) {
+final class TokenList private (tokens: Tokens) {
   def trailing(token: Token): SeqView[Token, IndexedSeq[Token]] =
     tokens.view(tok2idx(token), tokens.length).drop(1)
   def leading(token: Token): SeqView[Token, IndexedSeq[Token]] =
@@ -59,4 +59,13 @@ class TokenList(tokens: Tokens) {
     }
   }
 
+  def leadingSpaces(token: Token): SeqView[Token, IndexedSeq[Token]] =
+    leading(token).takeWhile(_.is[Token.Space])
+
+  def trailingSpaces(token: Token): SeqView[Token, IndexedSeq[Token]] =
+    trailing(token).takeWhile(_.is[Token.Space])
+}
+
+object TokenList {
+  def apply(tokens: Tokens): TokenList = new TokenList(tokens)
 }
diff --git a/scalafix-tests/unit/src/test/scala/scalafix/tests/core/util/TokenListTest.scala b/scalafix-tests/unit/src/test/scala/scalafix/tests/core/util/TokenListTest.scala
index 54edd7e4a..486e3ca0b 100644
--- a/scalafix-tests/unit/src/test/scala/scalafix/tests/core/util/TokenListTest.scala
+++ b/scalafix-tests/unit/src/test/scala/scalafix/tests/core/util/TokenListTest.scala
@@ -6,21 +6,90 @@ import scalafix.util.TokenList
 
 class TokenListTest extends FunSuite {
 
-  val tokens = "".tokenize.get // this contains two tokens: beginningOfFile and endOfFile
-  val firstToken = tokens.head
-  val lastToken = tokens.last
-  val tokenList = new TokenList(tokens)
+  val emptyFileTokens = "".tokenize.get // this contains two tokens: BOF and EOF
+  val nonEmptyFileTokens =
+    """package foo
+      |
+      |object Bar {
+      |  val baz = 10
+      |}
+    """.stripMargin.tokenize.get
 
-  test("Prev returns the firstToken when is given the firstToken") {
-    assert(tokenList.prev(firstToken) == firstToken)
-    assert(tokenList.prev(tokens.last) == firstToken)
+  test("prev returns the preceding token") {
+    val tokenList = TokenList(emptyFileTokens)
+    assert(tokenList.prev(emptyFileTokens.last) == emptyFileTokens.head)
   }
 
-  test("Slice gives an empty list with same token as inputs") {
-    assert(tokenList.slice(firstToken, firstToken) == Seq())
+  test("prev returns self if there is no preceding token") {
+    val tokenList = TokenList(emptyFileTokens)
+    assert(tokenList.prev(emptyFileTokens.head) == emptyFileTokens.head)
   }
 
-  test("Next breaks gives the lastToken if it is given lastToken") {
-    assert(tokenList.next(lastToken) == lastToken)
+  test("next returns the following token") {
+    val tokenList = TokenList(emptyFileTokens)
+    assert(tokenList.next(emptyFileTokens.head) == emptyFileTokens.last)
+  }
+
+  test("next returns self if there is no following token") {
+    val tokenList = TokenList(emptyFileTokens)
+    assert(tokenList.next(emptyFileTokens.last) == emptyFileTokens.last)
+  }
+
+  test("slice returns an empty seq if there is no token in between") {
+    val tokenList = TokenList(emptyFileTokens)
+    assert(tokenList.slice(emptyFileTokens.head, emptyFileTokens.head) == Seq())
+  }
+
+  test("slice returns all tokens between from/to tokens") {
+    val Some(kwObject) = nonEmptyFileTokens.find(_.is[Token.KwObject])
+    val Some(leftBrace) = nonEmptyFileTokens.find(_.is[Token.LeftBrace])
+    val tokenList = TokenList(nonEmptyFileTokens)
+
+    val slice = tokenList.slice(kwObject, leftBrace)
+    assert(slice.size == 3)
+    val Seq(space1, bar, space2) = slice
+    assert(space1.is[Token.Space])
+    assert(bar.syntax.equals("Bar"))
+    assert(space2.is[Token.Space])
+  }
+
+  test("leadingSpaces returns all spaces preceding a token") {
+    val Some(equals) = nonEmptyFileTokens.find(_.is[Token.Equals])
+    val tokenList = TokenList(nonEmptyFileTokens)
+
+    val spaces = tokenList.leadingSpaces(equals)
+    assert(spaces.size == 3)
+    val Seq(s1, s2, s3) = spaces
+    assert(s1 == tokenList.prev(equals))
+    assert(s2 == tokenList.prev(s1))
+    assert(s3 == tokenList.prev(s2))
+  }
+
+  test(
+    "leadingSpaces returns an empty seq if there are no spaces preceding a token") {
+    val Some(kwPackage) = nonEmptyFileTokens.find(_.is[Token.KwPackage])
+    val tokenList = TokenList(nonEmptyFileTokens)
+
+    assert(tokenList.leadingSpaces(kwPackage) == Seq())
+  }
+
+  test("trailingSpaces returns all spaces following a token") {
+    val Some(equals) = nonEmptyFileTokens.find(_.is[Token.Equals])
+    val tokenList = TokenList(nonEmptyFileTokens)
+
+    val spaces = tokenList.trailingSpaces(equals)
+    assert(spaces.size == 3)
+    val Seq(s1, s2, s3) = spaces
+    assert(s1 == tokenList.next(equals))
+    assert(s2 == tokenList.next(s1))
+    assert(s3 == tokenList.next(s2))
+  }
+
+  test(
+    "trailingSpaces returns an empty seq if there are no spaces following a token") {
+    val Some(rightBrace) = nonEmptyFileTokens.find(_.is[Token.RightBrace])
+    val tokenList = TokenList(nonEmptyFileTokens)
+
+    assert(tokenList.trailingSpaces(rightBrace) == Seq())
   }
 }
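
For context, a minimal standalone sketch (not part of the diff above) of how the new TokenList.leadingSpaces and TokenList.trailingSpaces helpers could be exercised; the object name and the snippet being tokenized are illustrative assumptions, while tokenize.get, TokenList(...), and the Token classifiers mirror the test code in this patch.

import scala.meta._
import scalafix.util.TokenList

// Hypothetical demo, not included in this PR.
object TokenListDemo extends App {
  // Tokenize a small snippet, as the tests above do with `"".tokenize.get`.
  val tokens = "val answer = 42".tokenize.get
  val tokenList = TokenList(tokens)

  // Locate the `=` token and inspect the spaces around it.
  val Some(equalsTok) = tokens.find(_.is[Token.Equals])
  println(tokenList.leadingSpaces(equalsTok).toList) // spaces immediately before `=`
  println(tokenList.trailingSpaces(equalsTok).toList) // spaces immediately after `=`
}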