diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Access.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Access.scala index 6fb67be..c646e67 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Access.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Access.scala @@ -50,8 +50,8 @@ final case class AccessMessage( sealedValue: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.privateAccess.isDefined) { val __value = sealedValue.privateAccess.get @@ -84,12 +84,13 @@ final case class AccessMessage( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { sealedValue.privateAccess.foreach { __v => @@ -378,8 +379,8 @@ final case class PrivateWithinAccess( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -391,12 +392,13 @@ final case class PrivateWithinAccess( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -537,8 +539,8 @@ final case class ProtectedWithinAccess( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -550,12 +552,13 @@ final case class ProtectedWithinAccess( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { diff --git 
a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Annotation.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Annotation.scala index 214354b..2cb478d 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Annotation.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Annotation.scala @@ -13,30 +13,31 @@ final case class Annotation( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toBase(tpe) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toBase(tpe) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Constant.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Constant.scala index 1f8f777..0ca96d9 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Constant.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Constant.scala @@ -58,8 +58,8 @@ final case class ConstantMessage( sealedValue: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.unitConstant.isDefined) { val __value = sealedValue.unitConstant.get @@ -108,12 +108,13 @@ final case class ConstantMessage( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { sealedValue.unitConstant.foreach { __v => @@ -441,8 +442,8 @@ final case class BooleanConstant( value: _root_.scala.Boolean = 
false ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -454,12 +455,13 @@ final case class BooleanConstant( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -518,8 +520,8 @@ final case class ByteConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -531,12 +533,13 @@ final case class ByteConstant( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -595,8 +598,8 @@ final case class ShortConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -608,12 +611,13 @@ final case class ShortConstant( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -672,8 +676,8 @@ final case class CharConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -685,12 +689,13 @@ final case class CharConstant( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = 
__computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -749,8 +754,8 @@ final case class IntConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -762,12 +767,13 @@ final case class IntConstant( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -826,8 +832,8 @@ final case class LongConstant( value: _root_.scala.Long = 0L ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -839,12 +845,13 @@ final case class LongConstant( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -903,8 +910,8 @@ final case class FloatConstant( value: _root_.scala.Float = 0.0f ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -916,12 +923,13 @@ final case class FloatConstant( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -980,8 +988,8 @@ final case class DoubleConstant( value: _root_.scala.Double = 0.0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] 
def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -993,12 +1001,13 @@ final case class DoubleConstant( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -1057,8 +1066,8 @@ final case class StringConstant( value: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -1070,12 +1079,13 @@ final case class StringConstant( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Diagnostic.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Diagnostic.scala index 0478dfa..cc8aa82 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Diagnostic.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Diagnostic.scala @@ -15,8 +15,8 @@ final case class Diagnostic( message: _root_.scala.Predef.String = "" ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (range.isDefined) { val __value = range.get @@ -39,12 +39,13 @@ final case class Diagnostic( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { range.foreach { __v => @@ -129,6 +130,7 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. object Severity { sealed trait Recognized extends Severity + @SerialVersionUID(0L) case object UNKNOWN_SEVERITY extends Severity(0) with Severity.Recognized { val index = 0 @@ -166,7 +168,6 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. 
@SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Severity(unrecognizedValue) with SemanticdbUnrecognizedEnum - lazy val values = scala.collection.immutable.Seq(UNKNOWN_SEVERITY, ERROR, WARNING, INFORMATION, HINT) def fromValue(__value: _root_.scala.Int): Severity = __value match { case 0 => UNKNOWN_SEVERITY diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Documentation.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Documentation.scala index c2c2fc4..07fbda4 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Documentation.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Documentation.scala @@ -14,8 +14,8 @@ final case class Documentation( format: dotty.tools.dotc.semanticdb.Documentation.Format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -34,12 +34,13 @@ final case class Documentation( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -110,6 +111,7 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do object Format { sealed trait Recognized extends Format + @SerialVersionUID(0L) case object HTML extends Format(0) with Format.Recognized { val index = 0 @@ -147,7 +149,6 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Format(unrecognizedValue) with SemanticdbUnrecognizedEnum - lazy val values = scala.collection.immutable.Seq(HTML, MARKDOWN, JAVADOC, SCALADOC, KDOC) def fromValue(__value: _root_.scala.Int): Format = __value match { case 0 => HTML diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Language.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Language.scala index 7007f29..c57a3d3 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Language.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Language.scala @@ -20,6 +20,7 @@ sealed abstract class Language(val value: _root_.scala.Int) extends SemanticdbG object Language { sealed trait Recognized extends Language + @SerialVersionUID(0L) case object UNKNOWN_LANGUAGE extends Language(0) with Language.Recognized { val index = 0 @@ -43,7 +44,6 @@ object Language { @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Language(unrecognizedValue) with SemanticdbUnrecognizedEnum - lazy val values = scala.collection.immutable.Seq(UNKNOWN_LANGUAGE, SCALA, JAVA) def fromValue(__value: _root_.scala.Int): Language = __value match { case 0 => UNKNOWN_LANGUAGE diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Location.scala 
b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Location.scala index b58fb8c..a3667e9 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Location.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Location.scala @@ -14,8 +14,8 @@ final case class Location( range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -31,12 +31,13 @@ final case class Location( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Range.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Range.scala index f8b1675..d273664 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Range.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Range.scala @@ -16,8 +16,8 @@ final case class Range( endCharacter: _root_.scala.Int = 0 ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -50,12 +50,13 @@ final case class Range( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Schema.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Schema.scala index 492e164..841e691 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Schema.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Schema.scala @@ -20,6 +20,7 @@ sealed abstract class Schema(val value: _root_.scala.Int) extends SemanticdbGen object Schema { sealed trait Recognized extends Schema + @SerialVersionUID(0L) case object LEGACY extends Schema(0) with Schema.Recognized { val index = 0 @@ -43,7 +44,6 @@ object Schema { @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Schema(unrecognizedValue) with SemanticdbUnrecognizedEnum - lazy val values = scala.collection.immutable.Seq(LEGACY, SEMANTICDB3, SEMANTICDB4) def fromValue(__value: _root_.scala.Int): Schema = __value match { case 0 => LEGACY diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Scope.scala 
b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Scope.scala index f396929..655ebe7 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Scope.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Scope.scala @@ -14,8 +14,8 @@ final case class Scope( hardlinks: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation] = _root_.scala.Seq.empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 symlinks.foreach { __item => val __value = __item @@ -28,12 +28,13 @@ final case class Scope( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { symlinks.foreach { __v => @@ -48,11 +49,11 @@ final case class Scope( }; } def clearSymlinks = copy(symlinks = _root_.scala.Seq.empty) - def addSymlinks(__vs: _root_.scala.Predef.String*): Scope = addAllSymlinks(__vs) + def addSymlinks(__vs: _root_.scala.Predef.String *): Scope = addAllSymlinks(__vs) def addAllSymlinks(__vs: Iterable[_root_.scala.Predef.String]): Scope = copy(symlinks = symlinks ++ __vs) def withSymlinks(__v: _root_.scala.Seq[_root_.scala.Predef.String]): Scope = copy(symlinks = __v) def clearHardlinks = copy(hardlinks = _root_.scala.Seq.empty) - def addHardlinks(__vs: dotty.tools.dotc.semanticdb.SymbolInformation*): Scope = addAllHardlinks(__vs) + def addHardlinks(__vs: dotty.tools.dotc.semanticdb.SymbolInformation *): Scope = addAllHardlinks(__vs) def addAllHardlinks(__vs: Iterable[dotty.tools.dotc.semanticdb.SymbolInformation]): Scope = copy(hardlinks = hardlinks ++ __vs) def withHardlinks(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation]): Scope = copy(hardlinks = __v) diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Signature.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Signature.scala index 9f1bff5..228e2f0 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Signature.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Signature.scala @@ -44,8 +44,8 @@ final case class SignatureMessage( sealedValue: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.classSignature.isDefined) { val __value = sealedValue.classSignature.get @@ -66,12 +66,13 @@ final case class SignatureMessage( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - 
__serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { sealedValue.classSignature.foreach { __v => @@ -221,8 +222,8 @@ final case class ClassSignature( declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -235,7 +236,7 @@ final case class ClassSignature( { val __value = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toBase(self) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; @@ -246,12 +247,13 @@ final case class ClassSignature( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { typeParameters.foreach { __v => @@ -268,7 +270,7 @@ final case class ClassSignature( }; { val __v = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toBase(self) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(3, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -285,7 +287,7 @@ final case class ClassSignature( def clearTypeParameters: ClassSignature = copy(typeParameters = _root_.scala.None) def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): ClassSignature = copy(typeParameters = Option(__v)) def clearParents = copy(parents = _root_.scala.Seq.empty) - def addParents(__vs: dotty.tools.dotc.semanticdb.Type*): ClassSignature = addAllParents(__vs) + def addParents(__vs: dotty.tools.dotc.semanticdb.Type *): ClassSignature = addAllParents(__vs) def addAllParents(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): ClassSignature = copy(parents = parents ++ __vs) def withParents(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): ClassSignature = copy(parents = __v) def withSelf(__v: dotty.tools.dotc.semanticdb.Type): ClassSignature = copy(self = __v) @@ -370,8 +372,8 @@ final case class MethodSignature( returnType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + 
private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -384,19 +386,20 @@ final case class MethodSignature( { val __value = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toBase(returnType) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { typeParameters.foreach { __v => @@ -413,7 +416,7 @@ final case class MethodSignature( }; { val __v = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toBase(returnType) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(3, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -424,7 +427,7 @@ final case class MethodSignature( def clearTypeParameters: MethodSignature = copy(typeParameters = _root_.scala.None) def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): MethodSignature = copy(typeParameters = Option(__v)) def clearParameterLists = copy(parameterLists = _root_.scala.Seq.empty) - def addParameterLists(__vs: dotty.tools.dotc.semanticdb.Scope*): MethodSignature = addAllParameterLists(__vs) + def addParameterLists(__vs: dotty.tools.dotc.semanticdb.Scope *): MethodSignature = addAllParameterLists(__vs) def addAllParameterLists(__vs: Iterable[dotty.tools.dotc.semanticdb.Scope]): MethodSignature = copy(parameterLists = parameterLists ++ __vs) def withParameterLists(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Scope]): MethodSignature = copy(parameterLists = __v) def withReturnType(__v: dotty.tools.dotc.semanticdb.Type): MethodSignature = copy(returnType = __v) @@ -496,8 +499,8 @@ final case class TypeSignature( upperBound: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -506,26 +509,27 @@ final case class TypeSignature( { val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toBase(lowerBound) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; { val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toBase(upperBound) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if 
(__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { typeParameters.foreach { __v => @@ -536,7 +540,7 @@ final case class TypeSignature( }; { val __v = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toBase(lowerBound) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(2, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -544,7 +548,7 @@ final case class TypeSignature( }; { val __v = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toBase(upperBound) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(3, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -624,30 +628,31 @@ final case class ValueSignature( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toBase(tpe) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toBase(tpe) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/SymbolInformation.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/SymbolInformation.scala index f6d0605..93fbb20 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/SymbolInformation.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/SymbolInformation.scala @@ -22,8 +22,8 @@ final case class SymbolInformation( documentation: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation] = _root_.scala.None ) extends SemanticdbGeneratedMessage 
derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -63,7 +63,7 @@ final case class SymbolInformation( { val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toBase(signature) - if (__value != dotty.tools.dotc.semanticdb.SignatureMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; @@ -74,7 +74,7 @@ final case class SymbolInformation( { val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toBase(access) - if (__value != dotty.tools.dotc.semanticdb.AccessMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; @@ -89,12 +89,13 @@ final case class SymbolInformation( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -135,7 +136,7 @@ final case class SymbolInformation( }; { val __v = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toBase(signature) - if (__v != dotty.tools.dotc.semanticdb.SignatureMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(17, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -143,7 +144,7 @@ final case class SymbolInformation( }; { val __v = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toBase(access) - if (__v != dotty.tools.dotc.semanticdb.AccessMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(18, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -167,12 +168,12 @@ final case class SymbolInformation( def withDisplayName(__v: _root_.scala.Predef.String): SymbolInformation = copy(displayName = __v) def withSignature(__v: dotty.tools.dotc.semanticdb.Signature): SymbolInformation = copy(signature = __v) def clearAnnotations = copy(annotations = _root_.scala.Seq.empty) - def addAnnotations(__vs: dotty.tools.dotc.semanticdb.Annotation*): SymbolInformation = addAllAnnotations(__vs) + def addAnnotations(__vs: dotty.tools.dotc.semanticdb.Annotation *): SymbolInformation = addAllAnnotations(__vs) def addAllAnnotations(__vs: Iterable[dotty.tools.dotc.semanticdb.Annotation]): SymbolInformation = copy(annotations = annotations ++ __vs) def withAnnotations(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation]): SymbolInformation = copy(annotations = __v) def withAccess(__v: dotty.tools.dotc.semanticdb.Access): SymbolInformation = copy(access = __v) def clearOverriddenSymbols = copy(overriddenSymbols = _root_.scala.Seq.empty) - def addOverriddenSymbols(__vs: _root_.scala.Predef.String*): SymbolInformation = addAllOverriddenSymbols(__vs) + def addOverriddenSymbols(__vs: _root_.scala.Predef.String *): SymbolInformation = addAllOverriddenSymbols(__vs) def 
addAllOverriddenSymbols(__vs: Iterable[_root_.scala.Predef.String]): SymbolInformation = copy(overriddenSymbols = overriddenSymbols ++ __vs) def withOverriddenSymbols(__v: _root_.scala.Seq[_root_.scala.Predef.String]): SymbolInformation = copy(overriddenSymbols = __v) def getDocumentation: dotty.tools.dotc.semanticdb.Documentation = documentation.getOrElse(dotty.tools.dotc.semanticdb.Documentation.defaultInstance) @@ -282,6 +283,7 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool object Kind { sealed trait Recognized extends Kind + @SerialVersionUID(0L) case object UNKNOWN_KIND extends Kind(0) with Kind.Recognized { val index = 0 @@ -396,7 +398,6 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Kind(unrecognizedValue) with SemanticdbUnrecognizedEnum - lazy val values = scala.collection.immutable.Seq(UNKNOWN_KIND, LOCAL, FIELD, METHOD, CONSTRUCTOR, MACRO, TYPE, PARAMETER, SELF_PARAMETER, TYPE_PARAMETER, OBJECT, PACKAGE, PACKAGE_OBJECT, CLASS, TRAIT, INTERFACE) def fromValue(__value: _root_.scala.Int): Kind = __value match { case 0 => UNKNOWN_KIND @@ -450,6 +451,7 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool object Property { sealed trait Recognized extends Property + @SerialVersionUID(0L) case object UNKNOWN_PROPERTY extends Property(0) with Property.Recognized { val index = 0 @@ -599,7 +601,6 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Property(unrecognizedValue) with SemanticdbUnrecognizedEnum - lazy val values = scala.collection.immutable.Seq(UNKNOWN_PROPERTY, ABSTRACT, FINAL, SEALED, IMPLICIT, LAZY, CASE, COVARIANT, CONTRAVARIANT, VAL, VAR, STATIC, PRIMARY, ENUM, DEFAULT, GIVEN, INLINE, OPEN, TRANSPARENT, INFIX, OPAQUE) def fromValue(__value: _root_.scala.Int): Property = __value match { case 0 => UNKNOWN_PROPERTY diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/SymbolOccurrence.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/SymbolOccurrence.scala index ea5ecfc..5d7670d 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/SymbolOccurrence.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/SymbolOccurrence.scala @@ -15,8 +15,8 @@ final case class SymbolOccurrence( role: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role = dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.UNKNOWN_ROLE ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (range.isDefined) { val __value = range.get @@ -39,12 +39,13 @@ final case class SymbolOccurrence( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { 
range.foreach { __v => @@ -127,6 +128,7 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools object Role { sealed trait Recognized extends Role + @SerialVersionUID(0L) case object UNKNOWN_ROLE extends Role(0) with Role.Recognized { val index = 0 @@ -150,7 +152,6 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Role(unrecognizedValue) with SemanticdbUnrecognizedEnum - lazy val values = scala.collection.immutable.Seq(UNKNOWN_ROLE, REFERENCE, DEFINITION) def fromValue(__value: _root_.scala.Int): Role = __value match { case 0 => UNKNOWN_ROLE diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Synthetic.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Synthetic.scala index ab3301f..3c6fcfb 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Synthetic.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Synthetic.scala @@ -14,8 +14,8 @@ final case class Synthetic( tree: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (range.isDefined) { val __value = range.get @@ -24,19 +24,20 @@ final case class Synthetic( { val __value = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toBase(tree) - if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { range.foreach { __v => @@ -47,7 +48,7 @@ final case class Synthetic( }; { val __v = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toBase(tree) - if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(2, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/TextDocument.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/TextDocument.scala index 5f377b2..f0347e8 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/TextDocument.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/TextDocument.scala @@ -21,8 +21,8 @@ final case class TextDocument( synthetics: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Synthetic] = _root_.scala.Seq.empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = 
{ + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -78,12 +78,13 @@ final case class TextDocument( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -147,19 +148,19 @@ final case class TextDocument( def withMd5(__v: _root_.scala.Predef.String): TextDocument = copy(md5 = __v) def withLanguage(__v: dotty.tools.dotc.semanticdb.Language): TextDocument = copy(language = __v) def clearSymbols = copy(symbols = _root_.scala.Seq.empty) - def addSymbols(__vs: dotty.tools.dotc.semanticdb.SymbolInformation*): TextDocument = addAllSymbols(__vs) + def addSymbols(__vs: dotty.tools.dotc.semanticdb.SymbolInformation *): TextDocument = addAllSymbols(__vs) def addAllSymbols(__vs: Iterable[dotty.tools.dotc.semanticdb.SymbolInformation]): TextDocument = copy(symbols = symbols ++ __vs) def withSymbols(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation]): TextDocument = copy(symbols = __v) def clearOccurrences = copy(occurrences = _root_.scala.Seq.empty) - def addOccurrences(__vs: dotty.tools.dotc.semanticdb.SymbolOccurrence*): TextDocument = addAllOccurrences(__vs) + def addOccurrences(__vs: dotty.tools.dotc.semanticdb.SymbolOccurrence *): TextDocument = addAllOccurrences(__vs) def addAllOccurrences(__vs: Iterable[dotty.tools.dotc.semanticdb.SymbolOccurrence]): TextDocument = copy(occurrences = occurrences ++ __vs) def withOccurrences(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolOccurrence]): TextDocument = copy(occurrences = __v) def clearDiagnostics = copy(diagnostics = _root_.scala.Seq.empty) - def addDiagnostics(__vs: dotty.tools.dotc.semanticdb.Diagnostic*): TextDocument = addAllDiagnostics(__vs) + def addDiagnostics(__vs: dotty.tools.dotc.semanticdb.Diagnostic *): TextDocument = addAllDiagnostics(__vs) def addAllDiagnostics(__vs: Iterable[dotty.tools.dotc.semanticdb.Diagnostic]): TextDocument = copy(diagnostics = diagnostics ++ __vs) def withDiagnostics(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Diagnostic]): TextDocument = copy(diagnostics = __v) def clearSynthetics = copy(synthetics = _root_.scala.Seq.empty) - def addSynthetics(__vs: dotty.tools.dotc.semanticdb.Synthetic*): TextDocument = addAllSynthetics(__vs) + def addSynthetics(__vs: dotty.tools.dotc.semanticdb.Synthetic *): TextDocument = addAllSynthetics(__vs) def addAllSynthetics(__vs: Iterable[dotty.tools.dotc.semanticdb.Synthetic]): TextDocument = copy(synthetics = synthetics ++ __vs) def withSynthetics(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Synthetic]): TextDocument = copy(synthetics = __v) diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/TextDocuments.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/TextDocuments.scala index 8bc46a1..41b8e1b 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/TextDocuments.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/TextDocuments.scala @@ -13,8 +13,8 @@ final case class TextDocuments( documents: _root_.scala.Seq[dotty.tools.dotc.semanticdb.TextDocument] = _root_.scala.Seq.empty ) extends 
SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 documents.foreach { __item => val __value = __item @@ -23,12 +23,13 @@ final case class TextDocuments( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { documents.foreach { __v => @@ -39,7 +40,7 @@ final case class TextDocuments( }; } def clearDocuments = copy(documents = _root_.scala.Seq.empty) - def addDocuments(__vs: dotty.tools.dotc.semanticdb.TextDocument*): TextDocuments = addAllDocuments(__vs) + def addDocuments(__vs: dotty.tools.dotc.semanticdb.TextDocument *): TextDocuments = addAllDocuments(__vs) def addAllDocuments(__vs: Iterable[dotty.tools.dotc.semanticdb.TextDocument]): TextDocuments = copy(documents = documents ++ __vs) def withDocuments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.TextDocument]): TextDocuments = copy(documents = __v) diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Tree.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Tree.scala index c6b1078..ed84d9b 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Tree.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Tree.scala @@ -52,8 +52,8 @@ final case class TreeMessage( sealedValue: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.applyTree.isDefined) { val __value = sealedValue.applyTree.get @@ -90,12 +90,13 @@ final case class TreeMessage( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { sealedValue.applyTree.foreach { __v => @@ -323,13 +324,13 @@ final case class ApplyTree( arguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Tree] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = 
dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toBase(function) - if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; @@ -340,17 +341,18 @@ final case class ApplyTree( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toBase(function) - if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -365,7 +367,7 @@ final case class ApplyTree( } def withFunction(__v: dotty.tools.dotc.semanticdb.Tree): ApplyTree = copy(function = __v) def clearArguments = copy(arguments = _root_.scala.Seq.empty) - def addArguments(__vs: dotty.tools.dotc.semanticdb.Tree*): ApplyTree = addAllArguments(__vs) + def addArguments(__vs: dotty.tools.dotc.semanticdb.Tree *): ApplyTree = addAllArguments(__vs) def addAllArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Tree]): ApplyTree = copy(arguments = arguments ++ __vs) def withArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Tree]): ApplyTree = copy(arguments = __v) @@ -429,8 +431,8 @@ final case class FunctionTree( body: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 parameters.foreach { __item => val __value = __item @@ -439,19 +441,20 @@ final case class FunctionTree( { val __value = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toBase(body) - if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { parameters.foreach { __v => @@ -462,7 +465,7 @@ final case class FunctionTree( }; { val __v = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toBase(body) - if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(2, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -470,7 +473,7 @@ 
final case class FunctionTree( }; } def clearParameters = copy(parameters = _root_.scala.Seq.empty) - def addParameters(__vs: dotty.tools.dotc.semanticdb.IdTree*): FunctionTree = addAllParameters(__vs) + def addParameters(__vs: dotty.tools.dotc.semanticdb.IdTree *): FunctionTree = addAllParameters(__vs) def addAllParameters(__vs: Iterable[dotty.tools.dotc.semanticdb.IdTree]): FunctionTree = copy(parameters = parameters ++ __vs) def withParameters(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.IdTree]): FunctionTree = copy(parameters = __v) def withBody(__v: dotty.tools.dotc.semanticdb.Tree): FunctionTree = copy(body = __v) @@ -532,8 +535,8 @@ final case class IdTree( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -545,12 +548,13 @@ final case class IdTree( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -609,30 +613,31 @@ final case class LiteralTree( constant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toBase(constant) - if (__value != dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toBase(constant) - if (__v != dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -691,37 +696,38 @@ final case class MacroExpansionTree( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends 
dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toBase(beforeExpansion) - if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; { val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toBase(tpe) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toBase(beforeExpansion) - if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -729,7 +735,7 @@ final case class MacroExpansionTree( }; { val __v = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toBase(tpe) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(2, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -798,8 +804,8 @@ final case class OriginalTree( range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (range.isDefined) { val __value = range.get @@ -808,12 +814,13 @@ final case class OriginalTree( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { range.foreach { __v => @@ -875,13 +882,13 @@ final case class SelectTree( id: _root_.scala.Option[dotty.tools.dotc.semanticdb.IdTree] = _root_.scala.None ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: 
_root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toBase(qualifier) - if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; @@ -892,17 +899,18 @@ final case class SelectTree( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toBase(qualifier) - if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -978,13 +986,13 @@ final case class TypeApplyTree( typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toBase(function) - if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; @@ -995,17 +1003,18 @@ final case class TypeApplyTree( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toBase(function) - if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -1020,7 +1029,7 @@ final case class TypeApplyTree( } def withFunction(__v: dotty.tools.dotc.semanticdb.Tree): TypeApplyTree = copy(function = __v) def clearTypeArguments = copy(typeArguments = _root_.scala.Seq.empty) - def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type*): TypeApplyTree = addAllTypeArguments(__vs) + def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type *): TypeApplyTree = addAllTypeArguments(__vs) def addAllTypeArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): TypeApplyTree = copy(typeArguments = typeArguments ++ 
__vs) def withTypeArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): TypeApplyTree = copy(typeArguments = __v) diff --git a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Type.scala b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Type.scala index 17d2d28..6e4be1d 100644 --- a/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Type.scala +++ b/output/src/main/scala/generated/dotty/tools/dotc/semanticdb/Type.scala @@ -64,8 +64,8 @@ final case class TypeMessage( sealedValue: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.typeRef.isDefined) { val __value = sealedValue.typeRef.get @@ -126,12 +126,13 @@ final case class TypeMessage( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { sealedValue.typeRef.foreach { __v => @@ -480,13 +481,13 @@ final case class TypeRef( typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toBase(prefix) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; @@ -504,17 +505,18 @@ final case class TypeRef( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toBase(prefix) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -536,7 +538,7 @@ final case class TypeRef( def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): TypeRef = copy(prefix = __v) def withSymbol(__v: _root_.scala.Predef.String): TypeRef = copy(symbol = __v) def clearTypeArguments = copy(typeArguments = _root_.scala.Seq.empty) - def addTypeArguments(__vs: 
dotty.tools.dotc.semanticdb.Type*): TypeRef = addAllTypeArguments(__vs) + def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type *): TypeRef = addAllTypeArguments(__vs) def addAllTypeArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): TypeRef = copy(typeArguments = typeArguments ++ __vs) def withTypeArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): TypeRef = copy(typeArguments = __v) @@ -608,13 +610,13 @@ final case class SingleType( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toBase(prefix) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; @@ -628,17 +630,18 @@ final case class SingleType( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toBase(prefix) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -711,8 +714,8 @@ final case class ThisType( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -724,12 +727,13 @@ final case class ThisType( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -789,13 +793,13 @@ final case class SuperType( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = 
dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toBase(prefix) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; @@ -809,17 +813,18 @@ final case class SuperType( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toBase(prefix) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -892,30 +897,31 @@ final case class ConstantType( constant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toBase(constant) - if (__value != dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toBase(constant) - if (__v != dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -973,8 +979,8 @@ final case class IntersectionType( types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 types.foreach { __item => val __value = dotty.tools.dotc.semanticdb.IntersectionType._typemapper_types.toBase(__item) @@ -983,12 +989,13 @@ final case class IntersectionType( __size } override def serializedSize: _root_.scala.Int = { - var read = 
__serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { types.foreach { __v => @@ -999,7 +1006,7 @@ final case class IntersectionType( }; } def clearTypes = copy(types = _root_.scala.Seq.empty) - def addTypes(__vs: dotty.tools.dotc.semanticdb.Type*): IntersectionType = addAllTypes(__vs) + def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): IntersectionType = addAllTypes(__vs) def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): IntersectionType = copy(types = types ++ __vs) def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): IntersectionType = copy(types = __v) @@ -1052,8 +1059,8 @@ final case class UnionType( types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 types.foreach { __item => val __value = dotty.tools.dotc.semanticdb.UnionType._typemapper_types.toBase(__item) @@ -1062,12 +1069,13 @@ final case class UnionType( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { types.foreach { __v => @@ -1078,7 +1086,7 @@ final case class UnionType( }; } def clearTypes = copy(types = _root_.scala.Seq.empty) - def addTypes(__vs: dotty.tools.dotc.semanticdb.Type*): UnionType = addAllTypes(__vs) + def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): UnionType = addAllTypes(__vs) def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): UnionType = copy(types = types ++ __vs) def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): UnionType = copy(types = __v) @@ -1131,8 +1139,8 @@ final case class WithType( types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 types.foreach { __item => val __value = dotty.tools.dotc.semanticdb.WithType._typemapper_types.toBase(__item) @@ -1141,12 +1149,13 @@ final case class WithType( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() 
+ 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { types.foreach { __v => @@ -1157,7 +1166,7 @@ final case class WithType( }; } def clearTypes = copy(types = _root_.scala.Seq.empty) - def addTypes(__vs: dotty.tools.dotc.semanticdb.Type*): WithType = addAllTypes(__vs) + def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): WithType = addAllTypes(__vs) def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): WithType = copy(types = types ++ __vs) def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): WithType = copy(types = __v) @@ -1211,13 +1220,13 @@ final case class StructuralType( declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toBase(tpe) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; @@ -1228,17 +1237,18 @@ final case class StructuralType( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toBase(tpe) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(4, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -1314,8 +1324,8 @@ final case class AnnotatedType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 annotations.foreach { __item => val __value = __item @@ -1324,24 +1334,25 @@ final case class AnnotatedType( { val __value = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toBase(tpe) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = 
__serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toBase(tpe) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -1355,7 +1366,7 @@ final case class AnnotatedType( }; } def clearAnnotations = copy(annotations = _root_.scala.Seq.empty) - def addAnnotations(__vs: dotty.tools.dotc.semanticdb.Annotation*): AnnotatedType = addAllAnnotations(__vs) + def addAnnotations(__vs: dotty.tools.dotc.semanticdb.Annotation *): AnnotatedType = addAllAnnotations(__vs) def addAllAnnotations(__vs: Iterable[dotty.tools.dotc.semanticdb.Annotation]): AnnotatedType = copy(annotations = annotations ++ __vs) def withAnnotations(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation]): AnnotatedType = copy(annotations = __v) def withTpe(__v: dotty.tools.dotc.semanticdb.Type): AnnotatedType = copy(tpe = __v) @@ -1418,13 +1429,13 @@ final case class ExistentialType( declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toBase(tpe) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; @@ -1435,17 +1446,18 @@ final case class ExistentialType( __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toBase(tpe) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -1521,8 +1533,8 @@ final case class UniversalType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -1531,24 +1543,25 @@ 
final case class UniversalType( { val __value = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toBase(tpe) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toBase(tpe) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(2, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -1623,30 +1636,31 @@ final case class ByNameType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toBase(tpe) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toBase(tpe) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) @@ -1704,30 +1718,31 @@ final case class RepeatedType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0 - private[this] def __computeSerializedValue(): _root_.scala.Int = { + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { val __value = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toBase(tpe) - if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__value.serializedSize != 0) { __size += 1 + 
SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; __size } override def serializedSize: _root_.scala.Int = { - var read = __serializedSizeCachedValue - if (read == 0) { - read = __computeSerializedValue() - __serializedSizeCachedValue = read + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size } - read + __size - 1 + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { val __v = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toBase(tpe) - if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) { + if (__v.serializedSize != 0) { _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__)
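// ---------------------------------------------------------------------------
// Note on the serializedSize rewrite repeated in every hunk above: the
// regenerated code memoizes the computed size shifted by +1, so that 0 can
// keep serving as the "not computed yet" sentinel even for messages whose
// encoded size really is zero, and the getter subtracts 1 before returning.
// A minimal sketch of that scheme with simplified placeholder names
// (SizeMemo/computeSize are illustrative, not part of the generated API):
final class SizeMemo(computeSize: () => Int) {
  private var memoized: Int = 0            // 0 == not computed yet

  def serializedSize: Int = {
    var size = memoized
    if (size == 0) {
      size = computeSize() + 1             // store size + 1, keeping 0 as the sentinel
      memoized = size
    }
    size - 1                               // undo the +1 shift on the way out
  }
}
// Example: a zero-sized message is now cached after the first call instead of
// being recomputed every time.
//   val memo = new SizeMemo(() => 0)
//   memo.serializedSize  // returns 0, and memoized == 1 afterwards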
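// ---------------------------------------------------------------------------
// The other recurring change swaps the emptiness test for embedded messages:
// instead of comparing the mapped value against the message's defaultInstance
// (a structural equality walk over every field), the generated code asks
// whether its serializedSize is zero, which is cheap once memoized. A hedged
// sketch with a hypothetical Msg type standing in for the generated
// TreeMessage/TypeMessage; the toy size below only approximates the real
// protobuf encoding:
final case class Msg(symbol: String = "", children: Seq[Msg] = Seq.empty) {
  def serializedSize: Int =
    (if (symbol.isEmpty) 0 else 1 + symbol.length) +
      children.map(c => 1 + c.serializedSize).sum
}
object Msg { val defaultInstance: Msg = Msg() }

def shouldWriteOld(v: Msg): Boolean = v != Msg.defaultInstance  // old check: structural comparison
def shouldWriteNew(v: Msg): Boolean = v.serializedSize != 0     // new check: encoded-size test
// For this toy encoding the two agree: a value equal to the default instance
// encodes to zero bytes and is skipped either way.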